# schedulers.py
import math
from torch import optim
def _triangular_f(it:int, ss:int, min_lr:float, max_lr:float):
    'Triangular cyclical LR: climb linearly from `min_lr` to `max_lr` over `ss` iterations, then descend symmetrically.'
    # number of completed cycles (one full cycle spans 2 * ss iterations)
    cyc = math.floor(it / (ss * 2))
    # iterations completed within the current cycle
    it_cyc = it - (cyc * 2 * ss)
    # distance from the mid-cycle iteration, where the lr peaks at max_lr
    mid_dist = math.fabs(it_cyc - ss)
    # fraction of the lr range by which to fall short of the peak
    scalar = mid_dist / ss
    return min_lr + (1 - scalar) * (max_lr - min_lr)
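# Illustrative values for _triangular_f (assuming ss=2, min_lr=0.1, max_lr=0.5):
# iterations 0..4 yield 0.1, 0.3, 0.5, 0.3, 0.1, after which the cycle repeats.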
def _triangular2_f(it:int, ss:int, min_lr:float, max_lr:float):
    'Triangular cyclical LR whose amplitude halves after each completed cycle (the `triangular2` policy).'
    # number of completed cycles (one full cycle spans 2 * ss iterations)
    cyc = math.floor(it / (ss * 2))
    # iterations completed within the current cycle
    it_cyc = it - (cyc * 2 * ss)
    # distance from the mid-cycle iteration, where the lr peaks
    mid_dist = math.fabs(it_cyc - ss)
    # fraction of the (shrinking) lr range by which to fall short of the peak
    scalar = mid_dist / ss
    return min_lr + (1 - scalar) * ((max_lr - min_lr) / 2 ** cyc)
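# Illustrative values for _triangular2_f (assuming ss=2, min_lr=0.1, max_lr=0.5):
# the peak is 0.5 in the first cycle, 0.3 in the second, 0.2 in the third,
# while the floor stays at min_lr=0.1 throughout.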
def _decay_f(it:int, ss:int, min_lr:float, max_lr:float):
    'Linear decay from `max_lr` to `min_lr` over `ss` iterations; constant at `min_lr` afterwards.'
    if it > ss:
        return min_lr
    diff = max_lr - min_lr
    # fraction of the decay window already elapsed
    scalar = it / ss
    return max_lr - (scalar * diff)
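# Illustrative values for _decay_f (assuming ss=4, min_lr=0.0, max_lr=1.0):
# iterations 0..4 yield 1.0, 0.75, 0.5, 0.25, 0.0; every later iteration stays at 0.0.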
def _exp_f(it:int, gamma:float, initial_lr:float):
    'Exponential decay: `initial_lr` scaled by `gamma` once per iteration.'
    return initial_lr * gamma ** it
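# Illustrative values for _exp_f (assuming gamma=0.9, initial_lr=1.0):
# iterations 0..3 yield 1.0, 0.9, 0.81, 0.729.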
def _exp_range_f(it:int, gamma:float, ss:int, min_lr:float, max_lr:float):
    'Triangular cyclical LR whose `min_lr`/`max_lr` boundaries both decay by `gamma` per iteration (the `exp_range` policy).'
    return _triangular_f(it, ss, min_lr * (gamma ** it), max_lr * (gamma ** it))
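# Unlike _triangular2_f, which halves the amplitude once per completed cycle,
# _exp_range_f shrinks both boundaries on every iteration, so the triangular
# envelope decays smoothly rather than in per-cycle steps.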
class FixedScheduler(optim.lr_scheduler._LRScheduler):
    'Scheduler that holds the learning rate constant at `fixed_lr`.'
    def __init__(self, fixed_lr:float, optimizer:optim.Optimizer):
        self.fixed_lr = fixed_lr
        super().__init__(optimizer)

    def get_lr(self):
        # _LRScheduler increments `last_epoch` on each call to `step()`,
        # but a fixed schedule ignores it
        lr = self.fixed_lr
        return [lr * pg.get('lr_mult', 1) for pg in self.optimizer.param_groups]
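# Every scheduler in this module scales the computed lr by an optional
# per-param-group 'lr_mult' key, which enables discriminative (layer-wise)
# learning rates. A hypothetical param-group setup, not taken from this repo:
#   optim.SGD([{'params': body.parameters(), 'lr_mult': 0.1},
#              {'params': head.parameters()}], lr=0.1)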
class TriangularScheduler(optim.lr_scheduler._LRScheduler):
    'Cyclical LR scheduler implementing the `triangular` policy via `_triangular_f`.'
    def __init__(self, step_size:int, min_lr:float, max_lr:float, optimizer:optim.Optimizer):
        self.step_size = step_size
        self.min_lr = min_lr
        self.max_lr = max_lr
        super().__init__(optimizer)

    def get_lr(self):
        it = self.last_epoch
        lr = _triangular_f(it, self.step_size, self.min_lr, self.max_lr)
        return [lr * pg.get('lr_mult', 1) for pg in self.optimizer.param_groups]
class Triangular2Scheduler(optim.lr_scheduler._LRScheduler):
    'Cyclical LR scheduler whose amplitude halves each cycle; wraps `_triangular2_f`.'
    def __init__(self, step_size:int, min_lr:float, max_lr:float, optimizer:optim.Optimizer):
        self.step_size = step_size
        self.min_lr = min_lr
        self.max_lr = max_lr
        super().__init__(optimizer)

    def get_lr(self):
        it = self.last_epoch
        lr = _triangular2_f(it, self.step_size, self.min_lr, self.max_lr)
        return [lr * pg.get('lr_mult', 1) for pg in self.optimizer.param_groups]
class ExpRangeScheduler(optim.lr_scheduler._LRScheduler):
    'Cyclical LR scheduler with exponentially decaying boundaries; wraps `_exp_range_f`.'
    def __init__(self, gamma:float, step_size:int, min_lr:float, max_lr:float, optimizer:optim.Optimizer):
        self.gamma = gamma
        self.step_size = step_size
        self.min_lr = min_lr
        self.max_lr = max_lr
        super().__init__(optimizer)

    def get_lr(self):
        it = self.last_epoch
        lr = _exp_range_f(it, self.gamma, self.step_size, self.min_lr, self.max_lr)
        return [lr * pg.get('lr_mult', 1) for pg in self.optimizer.param_groups]
class DecayScheduler(optim.lr_scheduler._LRScheduler):
    'Linear decay from `max_lr` to `min_lr` over `step_size` iterations; wraps `_decay_f`.'
    def __init__(self, step_size:int, min_lr:float, max_lr:float, optimizer:optim.Optimizer):
        self.step_size = step_size
        self.min_lr = min_lr
        self.max_lr = max_lr
        super().__init__(optimizer)

    def get_lr(self):
        it = self.last_epoch
        lr = _decay_f(it, self.step_size, self.min_lr, self.max_lr)
        return [lr * pg.get('lr_mult', 1) for pg in self.optimizer.param_groups]
class ExpScheduler(optim.lr_scheduler._LRScheduler):
    'Exponential decay of the learning rate by `gamma` per iteration; wraps `_exp_f`.'
    def __init__(self, gamma:float, initial_lr:float, optimizer:optim.Optimizer):
        self.gamma = gamma
        self.initial_lr = initial_lr
        super().__init__(optimizer)

    def get_lr(self):
        it = self.last_epoch
        lr = _exp_f(it, self.gamma, self.initial_lr)
        return [lr * pg.get('lr_mult', 1) for pg in self.optimizer.param_groups]
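# Usage sketch (illustrative, not shipped with this module): drive a
# TriangularScheduler from a minimal training loop. The model, data, and
# hyperparameters below are assumptions chosen for demonstration only.
if __name__ == '__main__':
    import torch
    from torch import nn

    model = nn.Linear(10, 1)
    opt = optim.SGD(model.parameters(), lr=0.1)
    sched = TriangularScheduler(step_size=4, min_lr=0.01, max_lr=0.1, optimizer=opt)
    for it in range(16):
        x, y = torch.randn(8, 10), torch.randn(8, 1)
        loss = nn.functional.mse_loss(model(x), y)
        opt.zero_grad()
        loss.backward()
        opt.step()
        # step() advances `last_epoch` and writes the new lr into each param group
        sched.step()
        print(it, opt.param_groups[0]['lr'])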