import torch
import torch.nn as nn
import matplotlib.pyplot as plt

initial_lr = 0.1
epochs = 100

# Define a simple model
class Model(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=3, kernel_size=3)

    def forward(self, x):
        return self.conv1(x)

if __name__ == '__main__':
    net = Model()
    optimizer = torch.optim.Adam(net.parameters(), lr=initial_lr)
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=5)
    print("Initial learning rate:", optimizer.defaults['lr'])

    lr_list = []
    for epoch in range(epochs):
        # train
        optimizer.zero_grad()
        optimizer.step()
        # print("lr at epoch %d: %f" % (epoch, optimizer.param_groups[0]['lr']))
        lr_list.append(optimizer.param_groups[0]['lr'])
        scheduler.step()

    # lr curve
    plt.plot(list(range(epochs)), lr_list)
    plt.xlabel("epoch")
    plt.ylabel("lr")
    plt.title("CosineAnnealingLR")
    plt.show()
torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max, eta_min=0, last_epoch=-1, verbose=False)
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=5)
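The schedule follows the closed-form cosine rule lr_t = eta_min + (initial_lr - eta_min) * (1 + cos(pi * t / T_max)) / 2, and because cosine is periodic the curve oscillates every 2 * T_max epochs, which is why the plot above shows repeating waves. A minimal sketch (the helper name cosine_lr is ours, not PyTorch's) that reproduces the curve without any optimizer:

import math

def cosine_lr(epoch, base_lr=0.1, eta_min=0.0, T_max=5):
    # Closed-form CosineAnnealingLR value at a given epoch
    return eta_min + (base_lr - eta_min) * (1 + math.cos(math.pi * epoch / T_max)) / 2

# Falls from 0.1 to 0 over T_max=5 epochs, then rises back to 0.1
print([round(cosine_lr(e), 4) for e in range(11)])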
torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0, T_mult=1, eta_min=0, last_epoch=-1, verbose=False)
scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=5, T_mult=1)
scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=5, T_mult=2)
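With T_0=5 and T_mult=1, the learning rate jumps back to its initial value every 5 epochs; with T_mult=2 each successive period doubles (5, 10, 20, ...). A minimal sketch to visualize the restart behavior, using a dummy parameter in place of the model above:

import torch
import matplotlib.pyplot as plt

# Dummy parameter so the optimizer has something to manage
param = torch.nn.Parameter(torch.zeros(1))
optimizer = torch.optim.Adam([param], lr=0.1)
scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=5, T_mult=2)

lr_list = []
for epoch in range(100):
    optimizer.step()
    lr_list.append(optimizer.param_groups[0]['lr'])
    scheduler.step()

plt.plot(range(100), lr_list)
plt.title("CosineAnnealingWarmRestarts (T_0=5, T_mult=2)")
plt.show()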
torch.optim.lr_scheduler.StepLR(optimizer, step_size, gamma=0.1, last_epoch=-1, verbose=False)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30)
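StepLR multiplies the learning rate by gamma once every step_size epochs; with step_size=30 and the default gamma=0.1, an initial lr of 0.1 becomes 0.01 at epoch 30, 0.001 at epoch 60, and so on. A quick sketch to confirm (dummy parameter, as above):

import torch

param = torch.nn.Parameter(torch.zeros(1))
optimizer = torch.optim.SGD([param], lr=0.1)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30)

for epoch in range(91):
    if epoch in (0, 29, 30, 60, 90):
        # prints 0.1, 0.1, 0.01, 0.001, 0.0001 (up to float error)
        print(epoch, optimizer.param_groups[0]['lr'])
    optimizer.step()
    scheduler.step()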
torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones, gamma=0.1, last_epoch=-1, verbose=False)
scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[30,80])
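MultiStepLR is the irregular-interval version of StepLR: the learning rate is multiplied by gamma at each epoch listed in milestones rather than at a fixed period. With milestones=[30, 80] and the default gamma=0.1, lr goes 0.1 -> 0.01 at epoch 30 -> 0.001 at epoch 80. A minimal check with a dummy parameter:

import torch

param = torch.nn.Parameter(torch.zeros(1))
optimizer = torch.optim.SGD([param], lr=0.1)
scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[30, 80])

lrs = []
for epoch in range(100):
    lrs.append(optimizer.param_groups[0]['lr'])
    optimizer.step()
    scheduler.step()

print(lrs[0], lrs[30], lrs[80])  # 0.1, 0.01, 0.001 (up to float error)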
torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma, last_epoch=-1, verbose=False)
scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.95)
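ExponentialLR decays the learning rate by a factor of gamma every single epoch, i.e. lr_t = initial_lr * gamma**t, so gamma should sit close to 1 or the lr collapses within a few epochs. A short check against the scheduler (dummy optimizer as before):

import torch

param = torch.nn.Parameter(torch.zeros(1))
optimizer = torch.optim.SGD([param], lr=0.1)
scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.95)

for epoch in range(10):
    optimizer.step()
    scheduler.step()

# After 10 epochs: 0.1 * 0.95**10 ~= 0.0599
print(optimizer.param_groups[0]['lr'])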
scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, mode='triangular', step_size_up=10, cycle_momentum=True)
scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, mode='triangular2', step_size_up=10, cycle_momentum=True)
scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, base_lr=0.01, max_lr=0.1, mode='exp_range', gamma=0.98, step_size_up=10, cycle_momentum=True)
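Two caveats worth noting: with cycle_momentum=True the optimizer needs a momentum-style parameter (e.g. SGD with momentum=0.9), and CyclicLR is designed to be stepped once per batch, not once per epoch. A minimal sketch of the intended usage, with the batch loop standing in for iterating over a real data loader:

import torch

param = torch.nn.Parameter(torch.zeros(1))
# SGD with momentum, since cycle_momentum=True cycles the momentum as well
optimizer = torch.optim.SGD([param], lr=0.01, momentum=0.9)
scheduler = torch.optim.lr_scheduler.CyclicLR(
    optimizer, base_lr=0.01, max_lr=0.1,
    mode='triangular', step_size_up=10, cycle_momentum=True)

lrs = []
for batch in range(60):   # stands in for: for batch in data_loader
    optimizer.step()
    scheduler.step()      # one step per batch
    lrs.append(optimizer.param_groups[0]['lr'])
# lr sweeps 0.01 -> 0.1 over 10 steps, back down over the next 10, and repeats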
torch.optim.lr_scheduler.OneCycleLR(optimizer, max_lr, total_steps=None, epochs=None, steps_per_epoch=None, pct_start=0.3, anneal_strategy='cos', cycle_momentum=True, base_momentum=0.85, max_momentum=0.95, div_factor=25.0, final_div_factor=10000.0, three_phase=False, last_epoch=-1, verbose=False)
scheduler = torch.optim.lr_scheduler.OneCycleLR(optimizer, max_lr=0.01, steps_per_epoch=len(data_loader), epochs=epochs)
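Like CyclicLR, OneCycleLR is stepped once per batch, which is why the constructor needs steps_per_epoch and epochs (or total_steps) to plan its single warmup-then-anneal cycle. A minimal sketch of the training-loop placement; steps_per_epoch and epochs here are placeholders for len(data_loader) and the real epoch count:

import torch

param = torch.nn.Parameter(torch.zeros(1))
optimizer = torch.optim.SGD([param], lr=0.01, momentum=0.9)

steps_per_epoch, epochs = 100, 10   # placeholders for len(data_loader) and the epoch count
scheduler = torch.optim.lr_scheduler.OneCycleLR(
    optimizer, max_lr=0.01, steps_per_epoch=steps_per_epoch, epochs=epochs)

for epoch in range(epochs):
    for batch in range(steps_per_epoch):   # stands in for: for batch in data_loader
        optimizer.step()
        scheduler.step()                   # per batch, not per epoch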