allow optimizers to adjust their own learning rate

Connor Olding 2017-07-02 02:52:07 +00:00
parent 22dc651cce
commit 1b1184480a
2 changed files with 9 additions and 6 deletions

View file

@@ -314,9 +314,8 @@ class RMSprop(Optimizer):
 class Adam(Optimizer):
     # paper: https://arxiv.org/abs/1412.6980
     # Adam generalizes* RMSprop, and
-    # adds a decay term to the regular (non-squared) delta, and
-    # does some decay-gain voodoo. (i guess it's compensating
-    # for the filtered deltas starting from zero)
+    # adds a decay term to the regular (non-squared) delta, and performs
+    # debiasing to compensate for the filtered deltas starting from zero.
     # * Adam == RMSprop when
     #   Adam.b1 == 0
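For reference, the debiasing that the rewritten comment describes is Adam's bias correction: the moving averages m and v are initialized to zero, so early estimates are scaled back up by 1/(1 - beta**t). A minimal sketch, not taken from this file; adam_step and its signature are illustrative only:

import numpy as np

def adam_step(param, grad, m, v, t, alpha=0.001, b1=0.9, b2=0.999, eps=1e-8):
    # exponential moving averages of the delta and its square;
    # both start at zero, so they are biased toward zero early on
    m = b1 * m + (1 - b1) * grad
    v = b2 * v + (1 - b2) * grad * grad
    # debiasing: divide by (1 - b**t) to undo that bias (t counts from 1)
    m_hat = m / (1 - b1 ** t)
    v_hat = v / (1 - b2 ** t)
    new_param = param - alpha * m_hat / (np.sqrt(v_hat) + eps)
    return new_param, m, v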
@@ -1072,7 +1071,7 @@ class Learner:
     def __init__(self, optim, epochs=100, rate=None):
         assert isinstance(optim, Optimizer)
         self.optim = optim
-        self.start_rate = optim.alpha if rate is None else _f(rate)
+        self.start_rate = rate # None is okay; it'll use optim.alpha instead.
         self.epochs = int(epochs)
         self.reset()
@@ -1101,6 +1100,8 @@ class Learner:
         self.optim.alpha = new_rate

     def rate_at(self, epoch):
+        if self.start_rate is None:
+            return self.optim.alpha
         return self.start_rate

     def next(self):
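Taken together, the two Learner hunks mean a Learner constructed without an explicit rate no longer snapshots optim.alpha; rate_at now reads whatever alpha the optimizer currently holds, which is what lets optimizers adjust their own learning rate. A stripped-down standalone sketch of that behavior; the class names mirror the diff, but these bodies are reduced to the relevant lines and this Optimizer is only a stand-in:

class Optimizer:
    def __init__(self, alpha=0.01):
        self.alpha = alpha  # the optimizer is free to change this itself

class Learner:
    def __init__(self, optim, epochs=100, rate=None):
        self.optim = optim
        self.start_rate = rate  # None defers to the optimizer's own alpha
        self.epochs = int(epochs)

    def rate_at(self, epoch):
        if self.start_rate is None:
            return self.optim.alpha  # pick up any self-adjusted rate
        return self.start_rate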

View file

@@ -136,10 +136,12 @@ if learner_class == SGDR:
     learner = learner_class(optim, epochs=epochs//starts, rate=lr,
                             restarts=starts-1, restart_decay=restart_decay,
                             expando=lambda i:0)
-else:
-    assert learner_class in (TriangularCLR, SineCLR, WaveCLR)
+elif learner_class in (TriangularCLR, SineCLR, WaveCLR):
     learner = learner_class(optim, epochs=epochs, lower_rate=0, upper_rate=lr,
                             frequency=epochs//starts)
+else:
+    lament('NOTE: no learning rate schedule selected.')
+    learner = Learner(optim, epochs=epochs)

 loss = CategoricalCrossentropy()
 mloss = Accuracy()
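In the second file, the old assert becomes an explicit fallback: an unrecognized learner_class logs a note and builds a plain Learner with no schedule, which, per the rate_at change above, leaves the learning rate under the optimizer's control. A quick usage sketch using the same stand-in classes as the sketch above:

optim = Optimizer(alpha=0.01)
learner = Learner(optim, epochs=10)   # no rate given, no schedule selected
print(learner.rate_at(0))             # 0.01, read straight from optim.alpha
optim.alpha = 0.005                   # the optimizer adjusts its own rate
print(learner.rate_at(1))             # 0.005, the adjustment shows through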