From 94f27d6f2a3c974028aa3e8bbbdd4ba0a5fa8200 Mon Sep 17 00:00:00 2001
From: Connor Olding
Date: Sun, 3 Feb 2019 14:30:34 +0100
Subject: [PATCH] add Adadelta optimizer

---
 onn/optimizer.py | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/onn/optimizer.py b/onn/optimizer.py
index b432e02..2dbb89f 100644
--- a/onn/optimizer.py
+++ b/onn/optimizer.py
@@ -62,6 +62,31 @@ class Adagrad(Optimizer):
         return -self.lr * dW / (np.sqrt(self.g) + self.eps)
 
 
+class Adadelta(Optimizer):
+    # paper: https://arxiv.org/abs/1212.5701
+
+    def __init__(self, lr=1.0, mu=0.95, eps=1e-8):
+        self.mu = _f(mu)
+        self.eps = _f(eps)
+
+        super().__init__(lr)
+
+    def reset(self):
+        self.g = None
+        self.x = None
+
+    def compute(self, dW, W):
+        if self.g is None:
+            self.g = np.zeros_like(dW)
+        if self.x is None:
+            self.x = np.zeros_like(dW)
+
+        self.g += (self.mu - 1) * (self.g - np.square(dW))
+        delta = dW * np.sqrt(self.x + self.eps) / (np.sqrt(self.g) + self.eps)
+        self.x += (self.mu - 1) * (self.x - np.square(delta))
+        return -self.lr * delta
+
+
 class RMSprop(Optimizer):
     # RMSprop generalizes* Adagrad, etc.
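
Note for reviewers (not part of the commit): the in-place accumulator updates above can be read as the standard exponential moving averages Adadelta uses for E[g^2] and E[dx^2]. A minimal sketch verifying that algebra with NumPy; the variable names here are illustrative and are not identifiers from onn:

    import numpy as np

    # The patch writes the accumulator update as
    #     g += (mu - 1) * (g - dW**2)
    # which is algebraically the same as the usual EMA form
    #     g = mu * g + (1 - mu) * dW**2
    # (and likewise for x, the accumulator of squared updates).
    mu = 0.95
    rng = np.random.default_rng(0)
    g = rng.random(5)
    dW = rng.random(5)

    inplace = g + (mu - 1) * (g - np.square(dW))
    ema = mu * g + (1 - mu) * np.square(dW)
    assert np.allclose(inplace, ema)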