fix learning without negate_trials
commit d6cc49cde1
parent bcb6cb9da1
2 changed files with 6 additions and 1 deletion
main.lua (5 additions, 1 deletion)
@@ -202,7 +202,11 @@ local function prepare_epoch()
 end
 
 local function load_next_trial()
-	if cfg.negate_trials then trial_neg = not trial_neg end
+	if cfg.negate_trials then
+		trial_neg = not trial_neg
+	else
+		trial_neg = true
+	end
 	trial_i = trial_i + 1
 	if trial_i == 0 and not cfg.unperturbed_trial then
 		trial_i = 1
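Before this change, trial_neg was only ever assigned when cfg.negate_trials was enabled, so with negation disabled it kept whatever value it had from earlier; the new branch pins it to true in that case. The sketch below isolates that toggle so the two modes can be compared side by side. Only the names cfg.negate_trials and trial_neg come from the hunk above; the wrapper function and the initial value of false are illustrative assumptions, not code from this repository.

-- Minimal sketch of the toggle logic from the hunk above, not the
-- project's actual surrounding code. `cfg` and `trial_neg` are the
-- names used in the diff; the closure and the starting value are
-- scaffolding for illustration.
local function make_loader(cfg)
	local trial_neg = false
	return function()
		if cfg.negate_trials then
			trial_neg = not trial_neg   -- alternate the sign every trial
		else
			trial_neg = true            -- fixed value when negation is off (the fix)
		end
		return trial_neg
	end
end

local alternating = make_loader({negate_trials = true})
print(alternating(), alternating(), alternating())  --> true   false  true
local fixed = make_loader({negate_trials = false})
print(fixed(), fixed(), fixed())                     --> true   true   true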
nn.lua (1 addition)
@@ -743,6 +743,7 @@ end
 
 function Model:distribute(W)
 	-- inverse operation of collect().
+	assert(W ~= nil)
 	local i = 0
 	for _, node in ipairs(self.nodes) do
 		for _, w in ipairs(node.weights) do
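The only change in nn.lua is the assert added at the top of Model:distribute(W), so a call with a missing weight vector now fails at the entry point instead of partway through the node loop. The sketch below is a hypothetical stand-in: it assumes distribute writes the flat vector W back into each node's weights by index (the loop body is not shown in the hunk), and only the method signature, the comment, and the assert line are taken from the diff.

-- Hypothetical stand-in for Model:distribute, showing the effect of the
-- new guard. The real method walks self.nodes and node.weights as in the
-- hunk; the index-based write-back below is an assumption based on the
-- "inverse operation of collect()" comment.
local Model = {}
Model.__index = Model

function Model.new(nodes)
	return setmetatable({nodes = nodes}, Model)
end

function Model:distribute(W)
	-- inverse operation of collect().
	assert(W ~= nil)  -- fail fast on a missing weight vector (the added line)
	local i = 0
	for _, node in ipairs(self.nodes) do
		for _, w in ipairs(node.weights) do
			for j = 1, #w do
				i = i + 1
				w[j] = W[i]
			end
		end
	end
end

local m = Model.new({{weights = {{0, 0}}}})
m:distribute({0.1, 0.2})                         -- succeeds
print(pcall(function() m:distribute(nil) end))   --> false  ...assertion failed!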