add optimizer hacks
parent 2e80f8b1a7
commit bf4ec2ec94
1 changed file with 5 additions and 3 deletions
@@ -59,13 +59,15 @@ class Ritual: # i'm just making up names at this point.
         if self.learner.per_batch:
             self.learner.batch(b / batch_count)
 
-        error, predicted = self.model.forward(inputs, outputs)
-        error += self.model.regulate_forward()
+        loss, predicted = self.model.forward(inputs, outputs)
+        reg_loss = self.model.regulate_forward()
         self.model.backward(predicted, outputs)
         self.model.regulate()
 
         optim = self.learner.optim
-        optim.model = self.model
+        optim.model = self.model # TODO: optim.inform(model=model) or something
+        optim.error = predicted - outputs # FIXME: temp
+        optim.loss = loss # FIXME: temp
         optim.update(self.model.dW, self.model.W)
 
         return predicted
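For context, a minimal sketch of an optimizer that consumes the attributes this commit pokes onto it before update() is called. The class name, the learning-rate parameter, and the loss-scaling behavior are all assumptions for illustration, not code from this repository:

import numpy as np

class LossAwareOptimizer:
    # hypothetical optimizer that reads the state Ritual stashes on it
    # (optim.model, optim.error, optim.loss) before each update() call.
    def __init__(self, lr=0.01):
        self.lr = lr
        self.model = None   # set via optim.model = self.model
        self.error = None   # set via optim.error = predicted - outputs
        self.loss = None    # set via optim.loss = loss

    def update(self, dW, W):
        # illustrative use of the stashed loss: damp the step size on
        # low-loss batches. W is a numpy array updated in place, matching
        # optim.update(self.model.dW, self.model.W) in the diff above.
        scale = self.loss / (1.0 + self.loss)
        W -= self.lr * scale * dW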
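The TODO in the diff suggests replacing the direct attribute assignments with a single inform() call. One possible shape for that method, purely hypothetical since the commit only names it in a comment:

class Optimizer:
    def inform(self, **kwargs):
        # would replace the ad-hoc pokes in Ritual with one call:
        #   optim.inform(model=self.model, error=error, loss=loss)
        for name, value in kwargs.items():
            setattr(self, name, value)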