add fallback to optim.lr in AnnealingLearner

This commit is contained in:
Connor Olding 2017-09-25 06:10:54 +00:00
parent 916c6fe1f0
commit a760c4841b

View File

@@ -1271,7 +1271,7 @@ class AnnealingLearner(Learner):
super().__init__(optim, epochs, rate)
def rate_at(self, epoch):
return self.start_rate * self.anneal**epoch
return super().rate_at(epoch) * self.anneal**epoch
def cosmod(x):
# plot: https://www.desmos.com/calculator/hlgqmyswy2