about summary refs log tree commit diff
path: root/nerv/layer/init.lua
diff options
context:
space:
mode:
Diffstat (limited to 'nerv/layer/init.lua')
-rw-r--r-- nerv/layer/init.lua | 6
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 43c2250..6b7a1d7 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -90,7 +90,7 @@ function Layer:find_param(pid_list, lconf, gconf, p_type, p_dim)
end
end
nerv.info("param [%s] of layer [%s] is not found in `layer_conf` or `layer_conf.pr`, " ..
- "switch to auto-generate.", pid_list_str, self.id)
+ "switch to auto-generate", pid_list_str, self.id)
local pid_g = self.id .. '_' .. pid_list[1]
p = p_type(pid_g, gconf)
p.trans = gconf.cumat_type(unpack(p_dim))
@@ -113,3 +113,7 @@ nerv.include('affine_recurrent.lua')
nerv.include('softmax.lua')
nerv.include('elem_mul.lua')
nerv.include('gate_fff.lua')
+nerv.include('lstm.lua')
+nerv.include('lstm_gate.lua')
+nerv.include('dropout.lua')
+nerv.include('gru.lua')