add optimizer hacks

Connor Olding 2019-03-22 12:59:33 +01:00
parent 2e80f8b1a7
commit bf4ec2ec94


@@ -59,13 +59,15 @@ class Ritual: # i'm just making up names at this point.
         if self.learner.per_batch:
             self.learner.batch(b / batch_count)
-        error, predicted = self.model.forward(inputs, outputs)
-        error += self.model.regulate_forward()
+        loss, predicted = self.model.forward(inputs, outputs)
+        reg_loss = self.model.regulate_forward()
         self.model.backward(predicted, outputs)
         self.model.regulate()
         optim = self.learner.optim
-        optim.model = self.model
+        optim.model = self.model # TODO: optim.inform(model=model) or something
+        optim.error = predicted - outputs # FIXME: temp
+        optim.loss = loss # FIXME: temp
         optim.update(self.model.dW, self.model.W)
         return predicted
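
The two FIXME lines attach the raw per-sample error and the scalar loss to the optimizer as plain attributes before update() is called, so an optimizer can peek at them without a signature change. As a rough, hypothetical sketch of what that enables (the class name LossAwareSGD, the loss-based damping rule, and the threshold are invented here; only the model/error/loss attributes and the update(dW, W) signature come from the diff):

    import numpy as np

    class LossAwareSGD:
        """Hypothetical optimizer that reads the attributes the Ritual now sets."""
        def __init__(self, lr=0.01):
            self.lr = lr
            self.model = None   # set externally: optim.model = model
            self.error = None   # set externally: optim.error = predicted - outputs
            self.loss = None    # set externally: optim.loss = loss

        def update(self, dW, W):
            # plain in-place SGD step, damped when the per-batch loss spikes
            # (the > 1.0 threshold is arbitrary and purely illustrative).
            lr = self.lr
            if self.loss is not None and self.loss > 1.0:
                lr /= self.loss
            W -= lr * dW

    # usage sketch, mirroring how the Ritual drives the optimizer:
    W = np.zeros(4)
    dW = np.ones(4)
    opt = LossAwareSGD(lr=0.1)
    opt.loss = 2.0
    opt.update(dW, W)   # W is modified in place

Whether the real optimizers in this codebase use the error or the loss this way is not shown in the commit; the FIXME/TODO comments suggest the attribute-poking interface is a temporary hack, with something like optim.inform(...) intended to replace it later.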