update comments

Connor Olding 2019-03-22 12:52:44 +01:00
parent e2530c17e5
commit 0aba113cb7
2 changed files with 3 additions and 3 deletions

View file

@@ -137,8 +137,7 @@ class Gelu(Activation):
 
 class Softmax(Activation):
     def forward(self, X):
-        # this alpha term is for numerical stability
-        # and is not strictly essential.
+        # the alpha term is just for numerical stability.
         alpha = np.max(X, axis=-1, keepdims=True)
         num = np.exp(X - alpha)
         den = np.sum(num, axis=-1, keepdims=True)
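Note: the max-subtraction trick in this hunk can be sanity-checked in isolation. The standalone helper and test values below are illustrative, not part of this repository.

import numpy as np

def stable_softmax(X):
    # subtracting the per-row max before exponentiating keeps np.exp in a
    # safe range; the result is unchanged because the exp(-alpha) factor
    # cancels between numerator and denominator.
    alpha = np.max(X, axis=-1, keepdims=True)
    num = np.exp(X - alpha)
    den = np.sum(num, axis=-1, keepdims=True)
    return num / den

X = np.array([[1000.0, 1001.0, 1002.0]])  # naive np.exp(X) would overflow
print(stable_softmax(X))                  # ~[[0.090, 0.245, 0.665]]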
@@ -192,6 +191,7 @@ class Selu(Activation):
 
 # more
 class TanhTest(Activation):
+    """preserves the variance of inputs drawn from the standard normal distribution."""
     def forward(self, X):
         self.sig = np.tanh(1 / 2 * X)
         return 2.4004 * self.sig
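Note: the new docstring's variance claim is easy to check empirically. The sample size and seed below are arbitrary, and this check is not part of the repository.

import numpy as np

rng = np.random.default_rng(0)
x = rng.standard_normal(1_000_000)     # inputs drawn from the standard normal
y = 2.4004 * np.tanh(0.5 * x)          # the TanhTest forward pass
print(np.var(x), np.var(y))            # both should be close to 1.0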

View file

@@ -22,7 +22,7 @@ def lower_priority():
     except ImportError:
         lament("you do not have pywin32 installed.")
         lament("the process priority could not be lowered.")
-        lament("consider: python -m pip install pypiwin32")
+        lament("consider: python -m pip install pywin32")
         lament("consider: conda install pywin32")
     else:
         import os
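Note: for context, the success path of a helper like lower_priority() typically uses the common pywin32 recipe sketched below. The structure, the os.nice fallback, and the error message are assumptions for illustration, not this repository's actual code.

import os
import sys

def lower_priority():
    # sketch: drop the current process to below-normal priority.
    if sys.platform == "win32":
        try:
            import win32api, win32con, win32process
        except ImportError:
            print("consider: python -m pip install pywin32", file=sys.stderr)
        else:
            handle = win32api.OpenProcess(
                win32con.PROCESS_ALL_ACCESS, True, os.getpid())
            win32process.SetPriorityClass(
                handle, win32process.BELOW_NORMAL_PRIORITY_CLASS)
    else:
        os.nice(1)  # POSIX: raise niceness, i.e. lower priority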