Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/init.lua       | 9 +++++----
-rw-r--r--  nerv/layer/lstm_gate.lua  | 4 ++--
2 files changed, 7 insertions(+), 6 deletions(-)
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index c5b7657..3a6cbcd 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -114,12 +114,13 @@ function Layer:find_param(plist, lconf, gconf, p_type, p_dim, p_gen)
         if lconf.pr:has_param(pid) then
             return lconf.pr:get_param(pid)
         end
+        pid = self.id .. '_' .. pname
+        if lconf.pr:has_param(pid) then
+            nerv.info("param id for [%s] of layer [%s] is generated automatically.", plist[1], self.id)
+            return lconf.pr:get_param(pid)
+        end
     end
     pid = self.id .. '_' .. plist[1]
-    if lconf.pr:has_param(pid) then
-        nerv.info("param id for [%s] of layer [%s] is generated automatically.", plist[1], self.id)
-        return lconf.pr:get_param(pid)
-    end
     nerv.info("param id for [%s] of layer [%s] is not found in the specified param repo, " ..
               "switch to auto-generate", plist_str, self.id)
     local p = p_type(pid, gconf)
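
In effect, this hunk moves the auto-generated id check (self.id .. '_' .. pname) inside the loop over the candidate names in plist, so every alias is tried against the param repo before the code falls back to generating a fresh param under plist[1]. A minimal standalone sketch of the resulting lookup order follows; the find_param_id helper and the plain-table repo are hypothetical stand-ins for the real lconf.pr repository, not nerv API:

-- Hypothetical stand-in for lconf.pr: a plain table keyed by param id.
local repo = { gate0_ltp = "weight-matrix" }

-- Sketch of the lookup order after this change; not the real method.
local function find_param_id(layer_id, plist, pr)
    for _, pname in ipairs(plist) do
        local pid = layer_id .. '_' .. pname
        if pr[pid] ~= nil then
            return pid -- an auto-generated id matched this alias
        end
    end
    -- no alias matched: fall back to auto-generating under plist[1]
    return layer_id .. '_' .. plist[1]
end

-- "ltp" now matches; before the change only plist[1] ("ltp1") was tried
-- after the loop, so this lookup would have missed the stored param.
print(find_param_id("gate0", {"ltp1", "ltp"}, repo)) -- gate0_ltp
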
diff --git a/nerv/layer/lstm_gate.lua b/nerv/layer/lstm_gate.lua
index e3b11b4..a3ae797 100644
--- a/nerv/layer/lstm_gate.lua
+++ b/nerv/layer/lstm_gate.lua
@@ -37,12 +37,12 @@ function LSTMGateLayer:init(batch_size)
         nerv.error("mismatching dimensions of linear transform parameter and output")
     end
     self.bp:train_init()
-    self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+    self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
 end
 
 function LSTMGateLayer:batch_resize(batch_size)
     if self.err_m:nrow() ~= batch_size then
-        self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+        self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
     end
 end
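
The lstm_gate.lua hunk replaces the hard-coded gconf.cumat_type with the layer's own mat_type, so the err_bakm buffer follows whatever matrix backend the layer was configured with instead of always allocating a CUDA matrix. A hedged sketch of the idea is below; the select_mat_type helper and the gconf.use_cpu / gconf.mmat_type fields are assumptions about how the backend is chosen, not confirmed nerv API:

-- Hypothetical helper: pick the matrix constructor once from the global
-- config, then allocate every buffer through it. use_cpu and mmat_type
-- are assumed field names, not verified against nerv.
local function select_mat_type(gconf)
    if gconf.use_cpu then
        return gconf.mmat_type  -- host (CPU) matrix constructor
    end
    return gconf.cumat_type     -- CUDA matrix constructor
end

-- Usage mirroring the patched lines above (inside a layer method):
--   self.mat_type = select_mat_type(self.gconf)
--   self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
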