add a linear (identity) activation for good measure
This commit is contained in:
parent
6933e21e0e
commit
93547b1974
|
@ -680,6 +680,13 @@ class Dropout(Layer):
|
|||
|
||||
# Activation Layers {{{2
|
||||
|
||||
class Linear(Layer):
    """Identity (linear) activation layer.

    Applies f(X) = X, so both the forward pass and the gradient pass
    through unchanged. Useful as a no-op activation, e.g. for a network's
    output layer in regression settings.
    """

    def forward(self, X):
        """Return the input unchanged (f(X) = X)."""
        return X

    def backward(self, dY):
        """Return the upstream gradient unchanged, since f'(X) = 1."""
        return dY
|
||||
class Sigmoid(Layer): # aka Logistic, Expit (inverse of Logit)
|
||||
def forward(self, X):
|
||||
self.sig = sigmoid(X)
|
||||
|
|
Loading…
Reference in New Issue
Block a user