move NLL to core
parent 0b9c1fe117
commit 910facf98d

2 changed files with 9 additions and 9 deletions
onn.py (8 changes)

@@ -83,14 +83,6 @@ class Confidence(Loss):
         dmax = p == np.max(p, axis=-1, keepdims=True)
         return detc * dmax
 
-class NLL(Loss): # Negative Log Likelihood
-    def forward(self, p, y):
-        correct = p * y
-        return np.mean(-correct)
-
-    def backward(self, p, y):
-        return -y / len(p)
-
 # Regularizers {{{1
 
 class SaturateRelu(Regularizer):
onn_core.py (10 changes)

@@ -184,6 +184,14 @@ class Weights:
 class Loss:
     pass
 
+class NLL(Loss): # Negative Log Likelihood
+    def forward(self, p, y):
+        correct = p * y
+        return np.mean(-correct)
+
+    def backward(self, p, y):
+        return -y / len(p)
+
 class CategoricalCrossentropy(Loss):
     # lifted from theano
 
@@ -203,7 +211,7 @@ class CategoricalCrossentropy(Loss):
 class Accuracy(Loss):
     # returns percentage of categories correctly predicted.
     # utilizes argmax(), so it cannot be used for gradient descent.
-    # use CategoricalCrossentropy for that instead.
+    # use CategoricalCrossentropy or NLL for that instead.
 
     def forward(self, p, y):
         correct = np.argmax(p, axis=-1) == np.argmax(y, axis=-1)
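For reference, a minimal standalone sketch of how the relocated NLL loss behaves, assuming p holds log-probabilities and y holds one-hot targets. The class body is copied from the diff above; the bare Loss stand-in and the sample arrays are illustrative, not part of the actual onn_core module.

import numpy as np

class Loss:  # stand-in for the Loss base class defined in onn_core.py
    pass

class NLL(Loss): # Negative Log Likelihood
    def forward(self, p, y):
        correct = p * y
        return np.mean(-correct)

    def backward(self, p, y):
        return -y / len(p)

# Hypothetical usage: two samples, three classes.
p = np.log(np.array([[0.7, 0.2, 0.1],
                     [0.1, 0.8, 0.1]]))  # log-probabilities (assumed input format)
y = np.array([[1.0, 0.0, 0.0],
              [0.0, 1.0, 0.0]])          # one-hot targets

loss = NLL()
print(loss.forward(p, y))   # scalar loss: mean of -(p * y) over all entries
print(loss.backward(p, y))  # gradient w.r.t. p: -y divided by the batch size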