use updated filenames.
don't use emnist by default.
tweak expando integer handling.
add some comments.
This commit is contained in:
Connor Olding 2017-06-26 00:16:51 +00:00
parent a770444199
commit c02fba01e2
3 changed files with 9 additions and 7 deletions

4
onn.py
View file

@@ -6,8 +6,8 @@
# BIG TODO: ensure numpy isn't upcasting to float64 *anywhere*.
# this is gonna take some work.
from optim_nn_core import *
from optim_nn_core import _check, _f, _0, _1
from onn_core import *
from onn_core import _check, _f, _0, _1
import sys

View file

@@ -825,6 +825,7 @@ class Model:
for k, v in used.items():
if not v:
# FIXME: lament undeclared without optim_nn.py!
lament("WARNING: unused weight", k)
def save_weights(self, fn, overwrite=False):
@@ -844,6 +845,7 @@ class Model:
data[:] = target.f
counts[key] += 1
if counts[key] > 1:
# FIXME: lament undeclared without optim_nn.py!
lament("WARNING: rewrote weight", key)
f.close()
@@ -1084,7 +1086,7 @@ def cosmod(x):
class SGDR(Learner):
# Stochastic Gradient Descent with Restarts
# paper: https://arxiv.org/abs/1608.03983
# NOTE: this is missing a couple features.
# NOTE: this is missing a couple of the proposed features.
per_batch = True
@@ -1099,7 +1101,7 @@ class SGDR(Learner):
self.expando = expando if expando is not None else lambda i: i
if type(self.expando) == int:
inc = self.expando
self.expando = self.expando = lambda i: inc
self.expando = lambda i: i * inc
self.splits = []
epochs = 0

View file

@@ -1,11 +1,11 @@
#!/usr/bin/env python3
from optim_nn import *
from optim_nn_core import _f
from onn import *
from onn_core import _f
#np.random.seed(42069)
use_emnist = True
use_emnist = False
measure_every_epoch = True