diff --git a/onn/activation.py b/onn/activation.py
index f06c2ec..4e9dcdc 100644
--- a/onn/activation.py
+++ b/onn/activation.py
@@ -137,8 +137,7 @@ class Gelu(Activation):
 
 class Softmax(Activation):
     def forward(self, X):
-        # this alpha term is for numerical stability
-        # and is not strictly essential.
+        # the alpha term is just for numerical stability.
         alpha = np.max(X, axis=-1, keepdims=True)
         num = np.exp(X - alpha)
         den = np.sum(num, axis=-1, keepdims=True)
@@ -192,6 +191,7 @@ class Selu(Activation):
 
 # more
 class TanhTest(Activation):
+    """preserves the variance of inputs drawn from the standard normal distribution."""
     def forward(self, X):
        self.sig = np.tanh(1 / 2 * X)
        return 2.4004 * self.sig
diff --git a/onn/utility.py b/onn/utility.py
index 7e6111c..274aff1 100644
--- a/onn/utility.py
+++ b/onn/utility.py
@@ -22,7 +22,7 @@ def lower_priority():
     except ImportError:
         lament("you do not have pywin32 installed.")
         lament("the process priority could not be lowered.")
-        lament("consider: python -m pip install pypiwin32")
+        lament("consider: python -m pip install pywin32")
         lament("consider: conda install pywin32")
     else:
         import os
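
A note on the Softmax hunk: subtracting the per-row maximum leaves the result mathematically unchanged, since the shift cancels between numerator and denominator, but it keeps np.exp from overflowing on large logits. A minimal standalone sketch, not part of the patch (softmax_naive and softmax_stable are hypothetical names for illustration):

    import numpy as np

    def softmax_naive(X):
        # overflows for large logits: np.exp(1000.0) -> inf, and inf/inf -> nan
        e = np.exp(X)
        return e / np.sum(e, axis=-1, keepdims=True)

    def softmax_stable(X):
        # shifting by the per-row max puts every exponent in (-inf, 0],
        # so np.exp stays finite; the common factor exp(-alpha) cancels
        # in the ratio, leaving the softmax value unchanged.
        alpha = np.max(X, axis=-1, keepdims=True)
        num = np.exp(X - alpha)
        return num / np.sum(num, axis=-1, keepdims=True)

    X = np.array([[1000.0, 1001.0, 1002.0]])
    print(softmax_naive(X))   # [[nan nan nan]] plus overflow warnings
    print(softmax_stable(X))  # [[0.09003057 0.24472847 0.66524096]]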
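
And on the TanhTest docstring: the constant 2.4004 is approximately the reciprocal of the standard deviation of tanh(x / 2) under x ~ N(0, 1), which is what makes the activation roughly variance-preserving. A quick Monte Carlo sanity check, also not part of the patch:

    import numpy as np

    rng = np.random.default_rng(0)
    x = rng.standard_normal(1_000_000)   # inputs drawn from N(0, 1)
    y = 2.4004 * np.tanh(x / 2)          # same computation as TanhTest.forward
    print(np.var(x), np.var(y))          # both come out close to 1.0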