move NLL to core

This commit is contained in:
Connor Olding 2017-08-05 10:59:05 +00:00
parent 0b9c1fe117
commit 910facf98d
2 changed files with 9 additions and 9 deletions

8
onn.py
View File

@@ -83,14 +83,6 @@ class Confidence(Loss):
dmax = p == np.max(p, axis=-1, keepdims=True)
return detc * dmax
class NLL(Loss): # Negative Log Likelihood
    # Presumably expects p to hold log-probabilities with y as the
    # (typically one-hot) targets -- verify against callers.
    def forward(self, p, y):
        # Mean of the negated, target-weighted predictions.
        return np.mean(-(y * p))
    def backward(self, p, y):
        # NOTE(review): forward averages over every element (np.mean) while
        # this scales by the leading dimension only -- confirm the intended
        # normalization convention before relying on gradient magnitudes.
        return np.negative(y) / len(p)
# Regularizers {{{1
class SaturateRelu(Regularizer):

View File

@@ -184,6 +184,14 @@ class Weights:
class Loss:
    # Common base type for loss objectives; concrete losses subclass this.
    pass
class NLL(Loss): # Negative Log Likelihood
    # Presumably expects p to hold log-probabilities with y as the
    # (typically one-hot) targets -- verify against callers.
    def forward(self, p, y):
        # Mean of the negated, target-weighted predictions.
        return np.mean(-(y * p))
    def backward(self, p, y):
        # NOTE(review): forward averages over every element (np.mean) while
        # this scales by the leading dimension only -- confirm the intended
        # normalization convention before relying on gradient magnitudes.
        return np.negative(y) / len(p)
class CategoricalCrossentropy(Loss):
# lifted from theano
@@ -203,7 +211,7 @@ class CategoricalCrossentropy(Loss):
class Accuracy(Loss):
# returns percentage of categories correctly predicted.
# utilizes argmax(), so it cannot be used for gradient descent.
# use CategoricalCrossentropy for that instead.
# use CategoricalCrossentropy or NLL for that instead.
def forward(self, p, y):
correct = np.argmax(p, axis=-1) == np.argmax(y, axis=-1)