sczhou committed
Commit de58ac1
1 Parent(s): 6bb77c4

add lr_scheduler.py

Files changed (1)
  1. basicsr/utils/lr_scheduler.py +96 -0
basicsr/utils/lr_scheduler.py ADDED
@@ -0,0 +1,96 @@
import math
from collections import Counter
from torch.optim.lr_scheduler import _LRScheduler


class MultiStepRestartLR(_LRScheduler):
    """MultiStep with restarts learning rate scheme.

    Args:
        optimizer (torch.optim.Optimizer): Torch optimizer.
        milestones (list): Iterations at which the learning rate is decreased.
        gamma (float): Decrease ratio. Default: 0.1.
        restarts (list): Restart iterations. Default: [0].
        restart_weights (list): Restart weights at each restart iteration.
            Default: [1].
        last_epoch (int): Used in _LRScheduler. Default: -1.
    """

    def __init__(self, optimizer, milestones, gamma=0.1, restarts=(0, ), restart_weights=(1, ), last_epoch=-1):
        self.milestones = Counter(milestones)
        self.gamma = gamma
        self.restarts = restarts
        self.restart_weights = restart_weights
        assert len(self.restarts) == len(self.restart_weights), 'restarts and their weights do not match.'
        super(MultiStepRestartLR, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        # At a restart iteration, reset the lr to a weighted copy of the initial lr.
        if self.last_epoch in self.restarts:
            weight = self.restart_weights[self.restarts.index(self.last_epoch)]
            return [group['initial_lr'] * weight for group in self.optimizer.param_groups]
        # Between milestones, keep the current lr unchanged.
        if self.last_epoch not in self.milestones:
            return [group['lr'] for group in self.optimizer.param_groups]
        # At a milestone, decay by gamma once per occurrence of that milestone.
        return [group['lr'] * self.gamma**self.milestones[self.last_epoch] for group in self.optimizer.param_groups]
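

# Usage sketch for MultiStepRestartLR (illustrative addition, not part of the
# original commit; `model` is a placeholder module and the values are chosen
# only for demonstration):
#
#     optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
#     scheduler = MultiStepRestartLR(optimizer, milestones=[3, 6], gamma=0.1,
#                                    restarts=[8], restart_weights=[0.5])
#     for _ in range(10):
#         optimizer.step()
#         scheduler.step()
#
# The lr stays at 0.1 for iterations 0-2, drops to 0.01 at iteration 3 and to
# 0.001 at iteration 6, then resets to 0.1 * 0.5 = 0.05 at restart iteration 8.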


def get_position_from_periods(iteration, cumulative_period):
    """Get the position from a period list.

    It will return the index of the right-closest number in the period list.
    For example, if cumulative_period = [100, 200, 300, 400]:
        if iteration == 50, return 0;
        if iteration == 210, return 2;
        if iteration == 300, return 2.

    Args:
        iteration (int): Current iteration.
        cumulative_period (list[int]): Cumulative period list.

    Returns:
        int: The position of the right-closest number in the period list.
    """
    for i, period in enumerate(cumulative_period):
        if iteration <= period:
            return i
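

# Note (illustrative addition, not part of the original commit): if `iteration`
# exceeds the final cumulative period, the loop above falls through and the
# function returns None. Callers such as CosineAnnealingRestartLR below
# therefore assume training runs for at most sum(periods) iterations.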


class CosineAnnealingRestartLR(_LRScheduler):
    """Cosine annealing with restarts learning rate scheme.

    An example config:
        periods = [10, 10, 10, 10]
        restart_weights = [1, 0.5, 0.5, 0.5]
        eta_min = 1e-7

    It has four cycles, each with 10 iterations. At the 10th, 20th and 30th
    iterations, the scheduler will restart with the weights in restart_weights.

    Args:
        optimizer (torch.optim.Optimizer): Torch optimizer.
        periods (list): Period for each cosine annealing cycle.
        restart_weights (list): Restart weights at each restart iteration.
            Default: [1].
        eta_min (float): The minimum lr. Default: 0.
        last_epoch (int): Used in _LRScheduler. Default: -1.
    """

    def __init__(self, optimizer, periods, restart_weights=(1, ), eta_min=0, last_epoch=-1):
        self.periods = periods
        self.restart_weights = restart_weights
        self.eta_min = eta_min
        assert len(self.periods) == len(self.restart_weights), 'periods and restart_weights should have the same length.'
        # Cumulative sums of the periods mark the restart boundaries.
        self.cumulative_period = [sum(self.periods[0:i + 1]) for i in range(0, len(self.periods))]
        super(CosineAnnealingRestartLR, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        # Locate the current cycle and the iteration at which it started.
        idx = get_position_from_periods(self.last_epoch, self.cumulative_period)
        current_weight = self.restart_weights[idx]
        nearest_restart = 0 if idx == 0 else self.cumulative_period[idx - 1]
        current_period = self.periods[idx]

        # Standard cosine annealing within the cycle, scaled by the restart weight.
        return [
            self.eta_min + current_weight * 0.5 * (base_lr - self.eta_min) *
            (1 + math.cos(math.pi * ((self.last_epoch - nearest_restart) / current_period)))
            for base_lr in self.base_lrs
        ]
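
For reference, a minimal usage sketch of CosineAnnealingRestartLR with the example config from its docstring. This is an illustrative addition, not part of the commit; the model and optimizer are hypothetical placeholders, and it assumes the repo root is on PYTHONPATH so the new module is importable:

import torch

from basicsr.utils.lr_scheduler import CosineAnnealingRestartLR

model = torch.nn.Linear(4, 4)  # placeholder model
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
scheduler = CosineAnnealingRestartLR(
    optimizer,
    periods=[10, 10, 10, 10],
    restart_weights=[1, 0.5, 0.5, 0.5],
    eta_min=1e-7)

for iteration in range(40):
    optimizer.step()
    scheduler.step()
    # The lr anneals from 1e-4 toward 1e-7 within each 10-iteration cycle and
    # restarts at iterations 10, 20 and 30 with the base lr scaled by the
    # corresponding restart weight (1, 0.5, 0.5, 0.5).
    print(iteration, optimizer.param_groups[0]['lr'])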