diff --git a/onn.py b/onn.py
index 0e04e97..070c6a3 100755
--- a/onn.py
+++ b/onn.py
@@ -83,14 +83,6 @@ class Confidence(Loss):
         dmax = p == np.max(p, axis=-1, keepdims=True)
         return detc * dmax
 
-class NLL(Loss): # Negative Log Likelihood
-    def forward(self, p, y):
-        correct = p * y
-        return np.mean(-correct)
-
-    def backward(self, p, y):
-        return -y / len(p)
-
 # Regularizers {{{1
 
 class SaturateRelu(Regularizer):
diff --git a/onn_core.py b/onn_core.py
index e8a948a..8b69dde 100644
--- a/onn_core.py
+++ b/onn_core.py
@@ -184,6 +184,14 @@ class Weights:
 class Loss:
     pass
 
+class NLL(Loss): # Negative Log Likelihood
+    def forward(self, p, y):
+        correct = p * y
+        return np.mean(-correct)
+
+    def backward(self, p, y):
+        return -y / len(p)
+
 class CategoricalCrossentropy(Loss):
     # lifted from theano
 
@@ -203,7 +211,7 @@ class CategoricalCrossentropy(Loss):
 class Accuracy(Loss):
     # returns percentage of categories correctly predicted.
     # utilizes argmax(), so it cannot be used for gradient descent.
-    # use CategoricalCrossentropy for that instead.
+    # use CategoricalCrossentropy or NLL for that instead.
     def forward(self, p, y):
         correct = np.argmax(p, axis=-1) == np.argmax(y, axis=-1)
         return np.mean(correct)
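
A minimal sketch of the relocated loss in use, assuming onn_core.py is importable from the working directory (the array values below are made up for illustration):

    import numpy as np

    from onn_core import NLL  # NLL now lives in onn_core per this diff

    # One-hot targets and predictions for a 2-sample, 3-class batch.
    y = np.array([[0., 1., 0.],
                  [1., 0., 0.]])
    p = np.array([[0.2, 0.7, 0.1],
                  [0.6, 0.3, 0.1]])

    loss = NLL()

    # forward() averages -(p * y) over every element of the batch:
    # here -(0.7 + 0.6) / 6 ≈ -0.2167.
    print(loss.forward(p, y))

    # backward() returns -y / len(p): the one-hot mask scaled by the
    # batch size (2), independent of the predictions themselves.
    print(loss.backward(p, y))

As the updated Accuracy comment notes, NLL (like CategoricalCrossentropy) provides a gradient usable for descent, whereas Accuracy's argmax() does not.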