move LayerNorm after Relu

Connor Olding 2018-05-07 16:22:48 +02:00
parent e3a8a6b87f
commit 7357c8ed62


@@ -233,12 +233,12 @@ local function make_network(input_size)
 	nn_ty:feed(nn_y)
 	nn_y = nn_y:feed(nn.Dense(128))
-	if cfg.layernorm then nn_y = nn_y:feed(nn.LayerNorm()) end
 	if cfg.deterministic then
 		nn_y = nn_y:feed(nn.Relu())
 	else
 		nn_y = nn_y:feed(nn.Gelu())
 	end
+	if cfg.layernorm then nn_y = nn_y:feed(nn.LayerNorm()) end
 	nn_z = nn_y
 	nn_z = nn_z:feed(nn.Dense(#gcfg.jp_lut))
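
For illustration only, here is a minimal sketch in plain Lua (not the repo's nn library) of what the reordering amounts to: normalization is now applied to the activation's output, i.e. LayerNorm(Relu(Dense(x))) rather than Relu(LayerNorm(Dense(x))). The layernorm below is a simplified assumption (mean/variance normalization with an epsilon, no learned gain or bias), and only the Relu branch is shown.

-- activation applied element-wise
local function relu(v)
  local out = {}
  for i, x in ipairs(v) do out[i] = math.max(0, x) end
  return out
end

-- normalize a vector to zero mean and unit variance (simplified LayerNorm)
local function layernorm(v, eps)
  eps = eps or 1e-5
  local n, mean = #v, 0
  for _, x in ipairs(v) do mean = mean + x end
  mean = mean / n
  local var = 0
  for _, x in ipairs(v) do var = var + (x - mean) ^ 2 end
  var = var / n
  local out = {}
  for i, x in ipairs(v) do out[i] = (x - mean) / math.sqrt(var + eps) end
  return out
end

local h = {0.5, -1.2, 3.0, 0.1}       -- stand-in for the Dense(128) output

local old_order = relu(layernorm(h))  -- ordering before this commit
local new_order = layernorm(relu(h))  -- ordering after this commit

print(table.concat(old_order, ", "))
print(table.concat(new_order, ", "))

With the new ordering, the values fed into the final Dense layer are normalized even after the activation has zeroed out negative entries, whereas the old ordering left the post-activation scale unconstrained.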