# shreyasvaidya's picture
# Upload folder using huggingface_hub
# 01bb3bb verified
from torch.optim.lr_scheduler import _LRScheduler
class FixLR(_LRScheduler):
    """Keep the learning rate of each parameter group fixed at its initial value.

    A no-op scheduler: ``get_lr`` always returns the base learning rates
    recorded from the optimizer at construction time, so stepping it never
    changes the learning rate. Useful as a drop-in placeholder when training
    code expects a scheduler object but a constant learning rate is desired.

    Args:
        optimizer (Optimizer): Wrapped optimizer.
        last_epoch (int): The index of last epoch. Default: -1.

    Example:
        >>> # Fixed learning rate
        >>> scheduler = FixLR(optimizer)
        >>> for epoch in range(100):
        >>>     scheduler.step()
        >>>     train(...)
        >>>     validate(...)
    """

    def __init__(self, optimizer, last_epoch=-1):
        # The base class snapshots each param group's current 'lr' into
        # self.base_lrs; get_lr below simply echoes those values back.
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        # Constant schedule: always return the initial learning rates.
        return self.base_lrs