Diffstat (limited to 'nerv/layer/affine_recurrent.lua')
-rw-r--r--  nerv/layer/affine_recurrent.lua | 52
1 file changed, 19 insertions(+), 33 deletions(-)
diff --git a/nerv/layer/affine_recurrent.lua b/nerv/layer/affine_recurrent.lua
index d537f4a..fd6f38f 100644
--- a/nerv/layer/affine_recurrent.lua
+++ b/nerv/layer/affine_recurrent.lua
@@ -9,31 +9,37 @@ function Recurrent:__init(id, global_conf, layer_conf)
self.dim_in = layer_conf.dim_in
self.dim_out = layer_conf.dim_out
self.gconf = global_conf
+ self.log_pre = self.id .. "[LOG]"
self.bp = self:find_param("bp", layer_conf, global_conf, nerv.BiasParam, {1, self.dim_out[1]}) --layer_conf.bp
- self.ltp_hh = self:find_param("ltp_hh", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[2], self.dim_out[1]}) --layer_conf.ltp_hh --from hidden to hidden
+ self.ltp_hh = self:find_param("ltphh", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[2], self.dim_out[1]}) --layer_conf.ltp_hh --from hidden to hidden
+ self.ltp_ih = self:find_param("ltpih", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[1], self.dim_out[1]}) --layer_conf.ltp_ih --from input to hidden
self:check_dim_len(2, 1)
self.direct_update = layer_conf.direct_update
self.clip = layer_conf.clip --clip error in back_propagate
+ if self.clip ~= nil then
+ nerv.info("%s creating, will clip the error at %f", self.log_pre, self.clip)
+ end
end
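
For context, a minimal sketch of how this layer might be instantiated. The class name nerv.Recurrent, the dims, and the variable names here are assumptions (the class registration is outside this diff), but the config keys match the fields read in __init above:

    -- hypothetical construction; sizes and "nerv.Recurrent" are assumed
    local layer = nerv.Recurrent("recurrent1", gconf, {
        dim_in  = {620, 300},   -- {input width, recurrent (hidden) width}
        dim_out = {300},        -- hidden width
        clip    = 10,           -- optional: bound the recurrent error to [-10, 10]
        pr      = param_repo,   -- repo searched by find_param for "ltpih"/"ltphh"/"bp"
    })
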
--Check parameter dimensions
function Recurrent:init(batch_size)
- if (self.ltp_hh.trans:ncol() ~= self.bp.trans:ncol()) then
+ if self.ltp_hh.trans:ncol() ~= self.bp.trans:ncol() or
+ self.ltp_ih.trans:ncol() ~= self.bp.trans:ncol() then
nerv.error("mismatching dimensions of ltp and bp")
end
- if (self.dim_in[1] ~= self.ltp_hh.trans:nrow() or
- self.dim_in[2] ~= self.ltp_hh.trans:nrow()) then
+ if self.dim_in[1] ~= self.ltp_ih.trans:nrow() or
+ self.dim_in[2] ~= self.ltp_hh.trans:nrow() then
nerv.error("mismatching dimensions of ltp and input")
end
if (self.dim_out[1] ~= self.bp.trans:ncol()) then
nerv.error("mismatching dimensions of bp and output")
end
- self.ltp_hh_grad = self.ltp_hh.trans:create()
self.ltp_hh:train_init()
+ self.ltp_ih:train_init()
self.bp:train_init()
end
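
To make the checks above concrete, these are the shapes init validates, shown with assumed sizes (dim_in[1] is the input width, dim_in[2] the hidden width):

    -- illustrative only: the shapes Recurrent:init validates
    local dim_in, dim_out = {620, 300}, {300}
    -- ltp_ih.trans : dim_in[1] x dim_out[1]  -> 620 x 300  (input  -> hidden)
    -- ltp_hh.trans : dim_in[2] x dim_out[1]  -> 300 x 300  (hidden -> hidden)
    -- bp.trans     : 1 x dim_out[1]          ->   1 x 300  (bias row)
    assert(#dim_in == 2 and #dim_out == 1)    -- mirrors check_dim_len(2, 1)
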
@@ -42,39 +48,19 @@ function Recurrent:batch_resize(batch_size)
end
function Recurrent:update(bp_err, input, output)
- if self.direct_update == true then
- local ltp_hh = self.ltp_hh.trans
- local bp = self.bp.trans
- local gconf = self.gconf
- if (gconf.momentum > 0) then
- -- momentum gain
- local mmt_gain = 1.0 / (1.0 - gconf.momentum)
- local n = input[1]:nrow() * mmt_gain
- -- update corrections (accumulated errors)
- self.ltp_hh.correction:mul(input[2], bp_err[1], 1.0, gconf.momentum, 'T', 'N')
- self.bp.correction:add(self.bp.correction, bp_err[1]:colsum(), gconf.momentum, 1.0)
- -- perform update and weight decay
- ltp_hh:add(ltp_hh, self.ltp_hh.correction, 1.0 - gconf.lrate * gconf.wcost / gconf.batch_size, - gconf.lrate / n)
- bp:add(bp, self.bp.correction, 1.0 - gconf.lrate * gconf.wcost / gconf.batch_size, - gconf.lrate / n)
- else
- ltp_hh:mul(input[2], bp_err[1], - gconf.lrate / gconf.batch_size, 1.0 - gconf.wcost * gconf.lrate / gconf.batch_size, 'T', 'N')
- bp:add(bp, bp_err[1]:colsum(), 1.0 - gconf.lrate * gconf.wcost / gconf.batch_size, - gconf.lrate / gconf.batch_size)
- end
- else
- --self.ltp_hh_grad:mul(input[2], bp_err[1], 1.0, 0.0, 'T', 'N')
- self.ltp_hh:update_by_err_input(bp_err[1], input[2])
- self.bp:update_by_gradient(bp_err[1]:colsum())
- end
+ self.ltp_ih:update_by_err_input(bp_err[1], input[1])
+ self.ltp_hh:update_by_err_input(bp_err[1], input[2])
+ self.bp:update_by_gradient(bp_err[1]:colsum())
end
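
The deleted direct_update branch spells out what update_by_err_input amounts to. For the plain SGD path (no momentum), the new call on ltp_ih is roughly equivalent to the following, modeled on the removed ltp_hh code; this is a sketch, not nerv's actual implementation:

    -- approximate expansion of self.ltp_ih:update_by_err_input(bp_err[1], input[1])
    local gconf = self.gconf
    self.ltp_ih.trans:mul(input[1], bp_err[1],
            -gconf.lrate / gconf.batch_size,                    -- gradient step
            1.0 - gconf.wcost * gconf.lrate / gconf.batch_size, -- weight decay
            'T', 'N')                                           -- W += -lr/b * x^T * err
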
function Recurrent:propagate(input, output)
- output[1]:copy_fromd(input[1])
+ output[1]:mul(input[1], self.ltp_ih.trans, 1.0, 0.0, 'N', 'N')
output[1]:mul(input[2], self.ltp_hh.trans, 1.0, 1.0, 'N', 'N')
output[1]:add_row(self.bp.trans, 1.0)
end
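
With the ltp_ih path added, propagate now computes the full recurrent affine transform instead of copying the input through unchanged:

    output[1] = input[1] * ltp_ih + input[2] * ltp_hh + bp
    i.e.  h_t = x_t * W_ih + h_(t-1) * W_hh + b
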
function Recurrent:back_propagate(bp_err, next_bp_err, input, output)
- next_bp_err[1]:copy_fromd(bp_err[1])
+ next_bp_err[1]:mul(bp_err[1], self.ltp_ih.trans, 1.0, 0.0, 'N', 'T')
next_bp_err[2]:mul(bp_err[1], self.ltp_hh.trans, 1.0, 0.0, 'N', 'T')
--[[
for i = 0, next_bp_err[2]:nrow() - 1 do
@@ -84,11 +70,11 @@ function Recurrent:back_propagate(bp_err, next_bp_err, input, output)
end
end
]]--
- if (self.clip ~= nil) then
- next_bp_err[2]:clip(- self.clip, self.clip)
+ if self.clip ~= nil then
+ next_bp_err[2]:clip(-self.clip, self.clip)
end
end
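
The clip bounds each entry of the error flowing back along the recurrent connection, a common guard against exploding gradients in RNN training. Its effect, shown on a plain Lua array for illustration (nerv's clip operates on device matrices):

    -- element-wise equivalent of next_bp_err[2]:clip(-c, c)
    local function clip_inplace(v, c)
        for i = 1, #v do
            v[i] = math.max(-c, math.min(c, v[i]))
        end
        return v
    end
    -- e.g. clip_inplace({-12.5, 0.3, 47.0}, 10) --> {-10, 0.3, 10}
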
function Recurrent:get_params()
- return nerv.ParamRepo({self.ltp_hh, self.bp})
+ return nerv.ParamRepo({self.ltp_ih, self.ltp_hh, self.bp})
end