author     Determinant <[email protected]>    2016-04-18 12:38:25 +0800
committer  Determinant <[email protected]>    2016-04-18 12:38:25 +0800
commit     c73636ba680cdb5b57602a1876a75c110b43d426 (patch)
tree       25a36588f066bfb850c5d0a81c5c40f01b8def56 /nerv/layer
parent     b15dbc05b08008327b88a62d7f90ef9250182e9b (diff)
support SeqBuffer decoding; fix bugs in finding params (tag: alpha-4)
Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/init.lua      | 9
-rw-r--r--  nerv/layer/lstm_gate.lua | 4
2 files changed, 7 insertions, 6 deletions
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index c5b7657..3a6cbcd 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -114,12 +114,13 @@ function Layer:find_param(plist, lconf, gconf, p_type, p_dim, p_gen)
         if lconf.pr:has_param(pid) then
             return lconf.pr:get_param(pid)
         end
+        pid = self.id .. '_' .. pname
+        if lconf.pr:has_param(pid) then
+            nerv.info("param id for [%s] of layer [%s] is generated automatically.", plist[1], self.id)
+            return lconf.pr:get_param(pid)
+        end
     end
     pid = self.id .. '_' .. plist[1]
-    if lconf.pr:has_param(pid) then
-        nerv.info("param id for [%s] of layer [%s] is generated automatically.", plist[1], self.id)
-        return lconf.pr:get_param(pid)
-    end
     nerv.info("param id for [%s] of layer [%s] is not found in the specified param repo, " ..
                 "switch to auto-generate", plist_str, self.id)
     local p = p_type(pid, gconf)
diff --git a/nerv/layer/lstm_gate.lua b/nerv/layer/lstm_gate.lua
index e3b11b4..a3ae797 100644
--- a/nerv/layer/lstm_gate.lua
+++ b/nerv/layer/lstm_gate.lua
@@ -37,12 +37,12 @@ function LSTMGateLayer:init(batch_size)
         nerv.error("mismatching dimensions of linear transform parameter and output")
     end
     self.bp:train_init()
-    self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+    self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
 end
 
 function LSTMGateLayer:batch_resize(batch_size)
     if self.err_m:nrow() ~= batch_size then
-        self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+        self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
     end
 end
 
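Taken together, the init.lua hunk changes the lookup order in Layer:find_param: the check for an automatically generated id (self.id .. '_' .. pname) now runs inside the loop over the candidate names in plist, instead of only once for plist[1] after the loop, so a repo entry stored under the generated id of any candidate name is found before the code falls back to auto-generation. Below is a minimal sketch of that order in Lua; it is not the NERV source, it only uses the ParamRepo calls visible in the hunk (lconf.pr:has_param / lconf.pr:get_param), and the wrapper name find_param_order is hypothetical.

    -- Sketch of the post-patch lookup order (assumptions noted above).
    local function find_param_order(self, plist, lconf, gconf, p_type)
        for _, pname in ipairs(plist) do
            -- 1) try the candidate name itself as a param id
            if lconf.pr:has_param(pname) then
                return lconf.pr:get_param(pname)
            end
            -- 2) try the generated id for this candidate
            --    (this step was moved inside the loop by this commit)
            local pid = self.id .. '_' .. pname
            if lconf.pr:has_param(pid) then
                return lconf.pr:get_param(pid)
            end
        end
        -- 3) nothing matched: generate a fresh param under the first candidate's id
        return p_type(self.id .. '_' .. plist[1], gconf)
    end

The lstm_gate.lua hunks are simpler: err_bakm is now allocated through self.mat_type rather than the hard-coded gconf.cumat_type, so the backup error matrix follows whichever matrix backend the layer itself was configured with.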