allow CLRs to use optimizer's learning rate
parent 763246df98
commit a85ee67780
1 changed file with 2 additions and 2 deletions
@@ -1377,8 +1377,8 @@ class TriangularCLR(Learner):
         return np.abs(((epoch - 1 + offset) % self.frequency) - offset) / offset
 
     def rate_at(self, epoch):
-        # NOTE: start_rate is treated as upper_rate
-        return self._t(epoch) * (self.start_rate - self.lower_rate) + self.lower_rate
+        upper_rate = self.start_rate if self.start_rate is not None else self.optim.lr
+        return self._t(epoch) * (upper_rate - self.lower_rate) + self.lower_rate
 
     def next(self):
         if not super().next():
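For reference, a minimal self-contained sketch of the triangular schedule this hunk touches, showing how the new fallback to the optimizer's learning rate behaves. Only _t and rate_at mirror lines visible in the diff; the constructor defaults, the offset = frequency / 2 definition, and the SimpleNamespace optimizer stand-in are assumptions made for illustration, not taken from the actual Learner class.

import numpy as np
from types import SimpleNamespace

class TriangularCLR:
    # Hypothetical reconstruction for illustration only.
    def __init__(self, optim, lower_rate=1e-4, start_rate=None, frequency=10):
        self.optim = optim              # assumed to expose the current .lr
        self.lower_rate = lower_rate    # bottom of the triangle wave
        self.start_rate = start_rate    # explicit upper rate, or None
        self.frequency = frequency      # epochs per full cycle

    def _t(self, epoch):
        # Triangle wave in [0, 1]; offset = frequency / 2 is an assumption,
        # since the visible hunk only shows how offset is used, not how it is set.
        offset = self.frequency / 2
        return np.abs(((epoch - 1 + offset) % self.frequency) - offset) / offset

    def rate_at(self, epoch):
        # After this commit: fall back to the optimizer's learning rate
        # when no start_rate was given (start_rate acts as the upper rate).
        upper_rate = self.start_rate if self.start_rate is not None else self.optim.lr
        return self._t(epoch) * (upper_rate - self.lower_rate) + self.lower_rate

# With start_rate=None, the peak of each cycle is now optim.lr (0.01 here).
optim = SimpleNamespace(lr=0.01)
clr = TriangularCLR(optim, lower_rate=1e-4, start_rate=None, frequency=10)
print([round(float(clr.rate_at(e)), 4) for e in range(1, 12)])

Under these assumptions the rate climbs from lower_rate to the upper rate over half a cycle and back down, so omitting start_rate simply pins the peak of each cycle to whatever learning rate the optimizer was configured with.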