From c81ce0afbb2dd3d5504ebbcac1d478a8ad7c2541 Mon Sep 17 00:00:00 2001
From: Connor Olding
Date: Sun, 21 Jan 2018 22:16:36 +0000
Subject: [PATCH] rename stuff and add a couple missing imports

---
 mnist_example.py              | 2 +-
 onn/__init__.py               | 8 ++++----
 onn/activation.py             | 2 +-
 onn/{floats.py => float.py}   | 0
 onn/layer.py                  | 2 +-
 onn/layer_base.py             | 2 +-
 onn/learner.py                | 2 +-
 onn/loss.py                   | 2 +-
 onn/model.py                  | 5 +++--
 onn/{nodes.py => nodal.py}    | 0
 onn/optimizer.py              | 3 ++-
 onn/optimizer_base.py         | 2 +-
 onn/parametric.py             | 2 +-
 onn/regularizer.py            | 2 +-
 onn/ritual.py                 | 2 +-
 onn/ritual_base.py            | 2 +-
 onn/{util.py => utility.py}   | 0
 onn/{weights.py => weight.py} | 0
 18 files changed, 20 insertions(+), 18 deletions(-)
 rename onn/{floats.py => float.py} (100%)
 rename onn/{nodes.py => nodal.py} (100%)
 rename onn/{util.py => utility.py} (100%)
 rename onn/{weights.py => weight.py} (100%)

diff --git a/mnist_example.py b/mnist_example.py
index d1a98ff..7e8300f 100644
--- a/mnist_example.py
+++ b/mnist_example.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 
 from onn import *
-from onn.floats import *
+from onn.float import *
 from dotmap import DotMap
 
 lower_priority()
diff --git a/onn/__init__.py b/onn/__init__.py
index 19d6020..437b3a9 100644
--- a/onn/__init__.py
+++ b/onn/__init__.py
@@ -5,20 +5,20 @@
 # this is gonna take some work.
 
 from .activation import *
-from .floats import *
+from .float import *
 from .initialization import *
 from .layer import *
 from .learner import *
 from .loss import *
 from .math import *
 from .model import *
-from .nodes import *
+from .nodal import *
 from .optimizer import *
 from .parametric import *
 from .regularizer import *
 from .ritual import *
-from .util import *
-from .weights import *
+from .utility import *
+from .weight import *
 # this is similar to default behaviour of having no __all__ variable at all,
 # but ours ignores modules as well.
 # this allows for `import sys` and such
diff --git a/onn/activation.py b/onn/activation.py
index 7bba491..f42165e 100644
--- a/onn/activation.py
+++ b/onn/activation.py
@@ -3,7 +3,7 @@ import numpy as np
 # just for speed, not strictly essential:
 from scipy.special import expit as sigmoid
 
-from .floats import *
+from .float import *
 from .layer_base import *
 
 class Identity(Layer):
diff --git a/onn/floats.py b/onn/float.py
similarity index 100%
rename from onn/floats.py
rename to onn/float.py
diff --git a/onn/layer.py b/onn/layer.py
index e1c9342..e3d33bc 100644
--- a/onn/layer.py
+++ b/onn/layer.py
@@ -1,6 +1,6 @@
 from .layer_base import *
 from .initialization import *
-from .floats import *
+from .float import *
 
 
 # Nonparametric Layers {{{1
diff --git a/onn/layer_base.py b/onn/layer_base.py
index a8cc253..1ef1781 100644
--- a/onn/layer_base.py
+++ b/onn/layer_base.py
@@ -2,7 +2,7 @@ import numpy as np
 
 from collections import defaultdict, OrderedDict
 
-from .weights import *
+from .weight import *
 
 # used for numbering layers like Keras:
 _layer_counters = defaultdict(lambda: 0)
diff --git a/onn/learner.py b/onn/learner.py
index f60dc99..148ab79 100644
--- a/onn/learner.py
+++ b/onn/learner.py
@@ -1,4 +1,4 @@
-from .floats import *
+from .float import *
 from .optimizer_base import *
 
 class Learner:
diff --git a/onn/loss.py b/onn/loss.py
index 94f8cb1..2faa309 100644
--- a/onn/loss.py
+++ b/onn/loss.py
@@ -1,6 +1,6 @@
 import numpy as np
 
-from .floats import *
+from .float import *
 
 class Loss:
     def forward(self, p, y):
diff --git a/onn/model.py b/onn/model.py
index bc5f68c..ddda277 100644
--- a/onn/model.py
+++ b/onn/model.py
@@ -1,8 +1,9 @@
 import sys
 
-from .floats import *
-from .nodes import *
+from .float import *
+from .nodal import *
 from .layer_base import *
+from .utility import *
 
 class Model:
     def __init__(self, nodes_in, nodes_out, loss=None, mloss=None, unsafe=False):
diff --git a/onn/nodes.py b/onn/nodal.py
similarity index 100%
rename from onn/nodes.py
rename to onn/nodal.py
diff --git a/onn/optimizer.py b/onn/optimizer.py
index 5a0e91c..370f794 100644
--- a/onn/optimizer.py
+++ b/onn/optimizer.py
@@ -1,7 +1,8 @@
 import numpy as np
 
-from .floats import *
+from .float import *
 from .optimizer_base import *
+from .utility import *
 
 # some of the the following optimizers are blatantly lifted from tiny-dnn:
 # https://github.com/tiny-dnn/tiny-dnn/blob/master/tiny_dnn/optimizers/optimizer.h
diff --git a/onn/optimizer_base.py b/onn/optimizer_base.py
index d7251c0..3a90f64 100644
--- a/onn/optimizer_base.py
+++ b/onn/optimizer_base.py
@@ -1,6 +1,6 @@
 import numpy as np
 
-from .floats import *
+from .float import *
 
 class Optimizer:
     def __init__(self, lr=0.1):
diff --git a/onn/parametric.py b/onn/parametric.py
index b1e6560..540b857 100644
--- a/onn/parametric.py
+++ b/onn/parametric.py
@@ -1,6 +1,6 @@
 import numpy as np
 
-from .floats import *
+from .float import *
 from .layer_base import *
 from .initialization import *
 
diff --git a/onn/regularizer.py b/onn/regularizer.py
index b8b1ac7..9a6aebf 100644
--- a/onn/regularizer.py
+++ b/onn/regularizer.py
@@ -1,6 +1,6 @@
 import numpy as np
 
-from .floats import *
+from .float import *
 
 class Regularizer:
     pass
diff --git a/onn/ritual.py b/onn/ritual.py
index 7499e6b..07b4dea 100644
--- a/onn/ritual.py
+++ b/onn/ritual.py
@@ -1,6 +1,6 @@
 import numpy as np
 
-from .floats import *
+from .float import *
 from .initialization import *
 from .ritual_base import *
 
diff --git a/onn/ritual_base.py b/onn/ritual_base.py
index 5362a3a..470a4f6 100644
--- a/onn/ritual_base.py
+++ b/onn/ritual_base.py
@@ -1,7 +1,7 @@
 import types
 import numpy as np
 
-from .floats import *
+from .float import *
 
 class Ritual: # i'm just making up names at this point.
     def __init__(self, learner=None):
diff --git a/onn/util.py b/onn/utility.py
similarity index 100%
rename from onn/util.py
rename to onn/utility.py
diff --git a/onn/weights.py b/onn/weight.py
similarity index 100%
rename from onn/weights.py
rename to onn/weight.py