A warmup scheduler for PyTorch that ramps the learning rate up from a small starting value at the beginning of training, then hands control over to a regular learning-rate scheduler.
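For intuition, linear warmup simply interpolates the learning rate from a small start value up to the optimizer's base value over a fixed number of steps. A minimal sketch of that rule (the names warmup_start_lr, base_lr, and warmup_steps here are illustrative, not this package's internals):

def linear_warmup_lr(step, warmup_steps, warmup_start_lr, base_lr):
    """Learning rate at `step` during a linear warmup phase (illustrative only)."""
    if step >= warmup_steps:
        return base_lr  # warmup finished; the regular scheduler takes over
    # linear interpolation between the start LR and the base LR
    return warmup_start_lr + (base_lr - warmup_start_lr) * step / warmup_steps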
Notice: PyTorch >= 1.1.0 must be installed first; see the official PyTorch website (https://pytorch.org) for instructions.
Then install the package:
pip install warmup_scheduler_pytorch
For details, see the example.py file on GitHub.
import torch
from torch.optim import SGD  # example optimizer
from torch.optim.lr_scheduler import CosineAnnealingLR  # example scheduler
from warmup_scheduler_pytorch import WarmUpScheduler

model = Model()  # your model
optimizer = SGD(model.parameters(), lr=0.1)
lr_scheduler = CosineAnnealingLR(optimizer, T_max=100, eta_min=0.01)
data_loader = torch.utils.data.DataLoader(...)  # your dataset

# WarmUpScheduler wraps the optimizer and the regular scheduler; it drives
# the warmup phase and steps the wrapped scheduler once warmup is over.
warmup_scheduler = WarmUpScheduler(optimizer, lr_scheduler,
                                   len_loader=len(data_loader),
                                   warmup_steps=100,
                                   warmup_start_lr=0.01,
                                   warmup_mode='linear')

epochs = 100
for epoch in range(epochs):
    for batch_data in data_loader:
        output = model(...)  # forward pass
        # loss = loss_fn(output, ...)
        # loss.backward()
        optimizer.step()
        optimizer.zero_grad()
        warmup_scheduler.step()  # call once per batch, after optimizer.step()

    # lr_scheduler.step() is no longer needed
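To sanity-check the resulting schedule, you can run the loop on dummy data and record the learning rate at each step. A minimal sketch, assuming only the constructor arguments and per-batch step() call shown above:

import torch
from torch import nn
from torch.optim import SGD
from torch.optim.lr_scheduler import CosineAnnealingLR
from torch.utils.data import DataLoader, TensorDataset
from warmup_scheduler_pytorch import WarmUpScheduler

model = nn.Linear(10, 1)  # dummy model, just to give the optimizer parameters
optimizer = SGD(model.parameters(), lr=0.1)
lr_scheduler = CosineAnnealingLR(optimizer, T_max=100, eta_min=0.01)
data_loader = DataLoader(TensorDataset(torch.randn(80, 10)), batch_size=8)

warmup_scheduler = WarmUpScheduler(optimizer, lr_scheduler,
                                   len_loader=len(data_loader),
                                   warmup_steps=100,
                                   warmup_start_lr=0.01,
                                   warmup_mode='linear')

lrs = []
for epoch in range(20):
    for (batch,) in data_loader:
        optimizer.step()  # no loss/backward needed just to trace the LR
        optimizer.zero_grad()
        warmup_scheduler.step()
        lrs.append(optimizer.param_groups[0]['lr'])

# should rise from warmup_start_lr over the first 100 steps,
# then follow the cosine schedule
print(lrs[:5], lrs[-5:])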