From a8871d4a67ffa532bffb805a957a040335617418 Mon Sep 17 00:00:00 2001
From: Connor Olding
Date: Tue, 17 Mar 2020 07:27:39 -0700
Subject: [PATCH] comment on NLL implementation

---
 onn/loss.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/onn/loss.py b/onn/loss.py
index 0261303..253b431 100644
--- a/onn/loss.py
+++ b/onn/loss.py
@@ -12,6 +12,10 @@ class Loss:


 class NLL(Loss):  # Negative Log Likelihood
+    # NOTE: this is a misnomer -- the "log" part is not implemented here.
+    # instead, you should use a Log activation at the end of your network,
+    # e.g. LogSoftmax.
+    # TODO: simplify the math that results from this.
     def forward(self, p, y):
         correct = p * y
         return np.mean(-correct)
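
For context, a minimal sketch of how this NLL is meant to be used together with a
log activation, per the note above: the network's final layer produces
log-probabilities (e.g. via LogSoftmax), and NLL.forward then takes the mean of the
negated entries selected by the one-hot targets. The log_softmax helper and the
example arrays below are assumptions for illustration only, not part of the patch
or of onn's API.

import numpy as np

def log_softmax(x):
    # numerically stable log-softmax over the last axis
    shifted = x - np.max(x, axis=-1, keepdims=True)
    return shifted - np.log(np.sum(np.exp(shifted), axis=-1, keepdims=True))

# raw network outputs (logits) for a batch of 2 samples, 3 classes
logits = np.array([[2.0, 0.5, -1.0],
                   [0.1, 0.2, 3.0]])
# one-hot targets
y = np.array([[1.0, 0.0, 0.0],
              [0.0, 0.0, 1.0]])

p = log_softmax(logits)   # log-probabilities, as NLL expects
correct = p * y           # keeps only the log-prob of the true class
loss = np.mean(-correct)  # same computation as NLL.forward(p, y)
print(loss)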