
PyTorch Learning Rate Scheduler Visualization (lr_scheduler)

import torch
import torch.nn as nn
import matplotlib.pyplot as plt

initial_lr = 0.1
epochs = 100

# Define a minimal model so the optimizer has parameters to manage
class Model(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=3, kernel_size=3)

    def forward(self, x):
        return self.conv1(x)

if __name__ == '__main__':
    net = Model()
    optimizer = torch.optim.Adam(net.parameters(), lr=initial_lr)
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=5)

    print("Initial learning rate:", optimizer.defaults['lr'])
    lr_list = []
    for epoch in range(epochs):
        # Dummy training step; a real loop would compute a loss and call backward()
        optimizer.zero_grad()
        optimizer.step()
        # print("Learning rate at epoch %d: %f" % (epoch, optimizer.param_groups[0]['lr']))
        # Record the rate used this epoch, then advance the scheduler
        lr_list.append(optimizer.param_groups[0]['lr'])
        scheduler.step()

    # Plot the learning-rate curve
    plt.plot(list(range(epochs)), lr_list)
    plt.xlabel("epoch")
    plt.ylabel("lr")
    plt.title("CosineAnnealingLR")
    plt.show()


torch.optim.lr_scheduler.CosineAnnealingLR

torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max, eta_min=0, last_epoch=-1, verbose=False)

scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=5)
(figure: learning-rate curve for CosineAnnealingLR, T_max=5)
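For reference, the schedule behind this curve has a closed form (the cosine annealing formula from the PyTorch documentation): the rate falls from the initial value to eta_min over T_max epochs, then climbs back over the next T_max. A minimal sketch to reproduce it by hand; `cosine_annealing_lr` is just a helper name chosen here:

import math

def cosine_annealing_lr(epoch, initial_lr, T_max, eta_min=0.0):
    # eta_t = eta_min + 0.5 * (initial_lr - eta_min) * (1 + cos(pi * epoch / T_max))
    return eta_min + 0.5 * (initial_lr - eta_min) * (1 + math.cos(math.pi * epoch / T_max))

# The full period of the curve is 2 * T_max epochs:
# 0.1 at epoch 0, 0.0 at epoch 5, back to 0.1 at epoch 10
print([round(cosine_annealing_lr(e, 0.1, T_max=5), 4) for e in range(11)])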

torch.optim.lr_scheduler.CosineAnnealingWarmRestarts

torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0, T_mult=1, eta_min=0, last_epoch=-1, verbose=False)

scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=5, T_mult=1)
(figure: learning-rate curve for CosineAnnealingWarmRestarts, T_0=5, T_mult=1)
scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=5, T_mult=2)
(figure: learning-rate curve for CosineAnnealingWarmRestarts, T_0=5, T_mult=2)
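Here T_0 sets the number of epochs until the first restart, and T_mult scales the period after each restart, so T_0=5, T_mult=2 restarts at epochs 5, 15, 35, and so on. The PyTorch docs also allow this scheduler to be stepped with a fractional epoch inside the batch loop; a minimal sketch, assuming a hypothetical `data_loader`:

iters = len(data_loader)  # hypothetical loader; any sized iterable of batches
for epoch in range(epochs):
    for i, batch in enumerate(data_loader):
        optimizer.zero_grad()
        # loss.backward() would go here in a real training loop
        optimizer.step()
        # A fractional epoch value moves the cosine curve (and restarts) per batch
        scheduler.step(epoch + i / iters)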

torch.optim.lr_scheduler.StepLR

torch.optim.lr_scheduler.StepLR(optimizer, step_size, gamma=0.1, last_epoch=-1, verbose=False)

scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30)
(figure: learning-rate curve for StepLR, step_size=30, gamma=0.1)
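StepLR simply multiplies the rate by gamma once every step_size epochs, so the curve is a staircase; the closed form is easy to verify by hand:

def step_lr(epoch, initial_lr, step_size, gamma=0.1):
    # One decay for every full step_size interval already completed
    return initial_lr * gamma ** (epoch // step_size)

print(round(step_lr(29, 0.1, step_size=30), 6))  # 0.1
print(round(step_lr(30, 0.1, step_size=30), 6))  # 0.01 (first drop)
print(round(step_lr(60, 0.1, step_size=30), 6))  # 0.001 (second drop)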

torch.optim.lr_scheduler.MultiStepLR

torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones, gamma=0.1, last_epoch=-1, verbose=False)

scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[30, 80])
(figure: learning-rate curve for MultiStepLR, milestones=[30, 80], gamma=0.1)
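MultiStepLR generalizes StepLR: instead of a fixed interval, the rate drops by gamma at each epoch listed in milestones. Equivalently:

import bisect

def multi_step_lr(epoch, initial_lr, milestones, gamma=0.1):
    # The decay power is the number of milestones at or before this epoch
    return initial_lr * gamma ** bisect.bisect_right(milestones, epoch)

print(round(multi_step_lr(29, 0.1, [30, 80]), 6))  # 0.1
print(round(multi_step_lr(30, 0.1, [30, 80]), 6))  # 0.01 (drop at first milestone)
print(round(multi_step_lr(80, 0.1, [30, 80]), 6))  # 0.001 (drop at second milestone)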

torch.optim.lr_scheduler.ExponentialLR

torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma, last_epoch=-1, verbose=False)

scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.95)
(figure: learning-rate curve for ExponentialLR, gamma=0.95)
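ExponentialLR applies the decay every single epoch, so the rate is initial_lr * gamma ** epoch; even a mild gamma compounds quickly:

# With gamma=0.95, the rate after 100 epochs has shrunk by a factor of roughly 170
print(0.1 * 0.95 ** 0)    # 0.1
print(0.1 * 0.95 ** 50)   # ~0.0077
print(0.1 * 0.95 ** 100)  # ~0.00059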

torch.optim.lr_scheduler.CyclicLR

torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr, max_lr, step_size_up=2000, step_size_down=None, mode='triangular', gamma=1.0, scale_fn=None, scale_mode='cycle', cycle_momentum=True, base_momentum=0.8, max_momentum=0.9, last_epoch=-1, verbose=False)

scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, mode='triangular', step_size_up=10, cycle_momentum=True)
(figure: learning-rate curve for CyclicLR, mode='triangular')
scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, mode='triangular2', step_size_up=10, cycle_momentum=True)
(figure: learning-rate curve for CyclicLR, mode='triangular2')
scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, mode='exp_range', gamma=0.98, step_size_up=10, cycle_momentum=True)
(figure: learning-rate curve for CyclicLR, mode='exp_range', gamma=0.98)
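The three modes differ only in how the triangle's amplitude evolves: 'triangular' keeps it constant, 'triangular2' halves it each cycle, and 'exp_range' scales it by gamma ** iterations. Note that CyclicLR is designed to be stepped after every batch rather than every epoch; a minimal sketch, assuming a hypothetical `data_loader`:

for epoch in range(epochs):
    for batch in data_loader:  # hypothetical loader
        optimizer.zero_grad()
        # loss.backward() would go here in a real training loop
        optimizer.step()
        scheduler.step()  # advance the cyclic schedule once per batch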

torch.optim.lr_scheduler.OneCycleLR

torch.optim.lr_scheduler.OneCycleLR(optimizer, max_lr, total_steps=None, epochs=None, steps_per_epoch=None, pct_start=0.3, anneal_strategy='cos', cycle_momentum=True, base_momentum=0.85, max_momentum=0.95, div_factor=25.0, final_div_factor=10000.0, three_phase=False, last_epoch=-1, verbose=False)

scheduler = torch.optim.lr_scheduler.OneCycleLR(optimizer, max_lr=0.01, steps_per_epoch=len(data_loader), epochs=epochs)
(figure: learning-rate curve for OneCycleLR, max_lr=0.01)
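OneCycleLR likewise expects one step() per batch; passing steps_per_epoch and epochs as above fixes the total schedule length, with the first pct_start fraction of steps spent warming up to max_lr and the remainder annealing down. A sketch with the same hypothetical `data_loader`:

for epoch in range(epochs):
    for batch in data_loader:  # hypothetical loader
        optimizer.zero_grad()
        # loss.backward() would go here in a real training loop
        optimizer.step()
        scheduler.step()  # exactly epochs * steps_per_epoch calls are expected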
