add a linear (identity) activation for good measure

Connor Olding 2017-07-25 04:24:32 +00:00
parent 6933e21e0e
commit 93547b1974


@@ -680,6 +680,13 @@ class Dropout(Layer):
# Activation Layers {{{2
class Linear(Layer):
    def forward(self, X):
        return X
    def backward(self, dY):
        return dY
class Sigmoid(Layer): # aka Logistic, Expit (inverse of Logit)
    def forward(self, X):
        self.sig = sigmoid(X)
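
For context, the identity activation is a no-op in both directions: the forward pass returns its input unchanged, and since d(identity)/dX = 1, the chain rule makes the backward pass return the upstream gradient as-is. A minimal standalone sketch of that behavior (independent of this repo's `Layer` base class, whose forward/backward convention is assumed from the diff):

```python
import numpy as np

# Standalone sketch; the Layer base class and calling convention
# are assumptions inferred from the diff above.
class Linear:
    def forward(self, X):
        return X   # identity: output equals input
    def backward(self, dY):
        return dY  # derivative of the identity is 1, so dX = dY

act = Linear()
X = np.random.randn(4, 3)
assert np.array_equal(act.forward(X), X)
dY = np.ones_like(X)
assert np.array_equal(act.backward(dY), dY)
```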