Diffstat (limited to 'nerv/examples/lmptb/lmptb/layer/affine_recurrent.lua')
-rw-r--r--  nerv/examples/lmptb/lmptb/layer/affine_recurrent.lua  |  80
1 file changed, 80 insertions, 0 deletions
diff --git a/nerv/examples/lmptb/lmptb/layer/affine_recurrent.lua b/nerv/examples/lmptb/lmptb/layer/affine_recurrent.lua
new file mode 100644
index 0000000..fd6f38f
--- /dev/null
+++ b/nerv/examples/lmptb/lmptb/layer/affine_recurrent.lua
@@ -0,0 +1,80 @@
+local Recurrent = nerv.class('nerv.AffineRecurrentLayer', 'nerv.Layer')
+
+--id: string
+--global_conf: table
+--layer_conf: table
+--Fetch the parameters (bias, input-to-hidden and hidden-to-hidden transforms)
+function Recurrent:__init(id, global_conf, layer_conf)
+ self.id = id
+ self.dim_in = layer_conf.dim_in
+ self.dim_out = layer_conf.dim_out
+ self.gconf = global_conf
+ self.log_pre = self.id .. "[LOG]"
+
+ self.bp = self:find_param("bp", layer_conf, global_conf, nerv.BiasParam, {1, self.dim_out[1]}) --layer_conf.bp
+ self.ltp_hh = self:find_param("ltphh", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[2], self.dim_out[1]}) --layer_conf.ltp_hh --from hidden to hidden
+ self.ltp_ih = self:find_param("ltpih", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[1], self.dim_out[1]}) --layer_conf.ltp_ih --from input to hidden
+
+ self:check_dim_len(2, 1)
+ self.direct_update = layer_conf.direct_update
+
+ self.clip = layer_conf.clip --clip error in back_propagate
+ if self.clip ~= nil then
+ nerv.info("%s creating, will clip the error to [-%f, %f]", self.log_pre, self.clip, self.clip)
+ end
+end
+
+--Check parameter dimensions and initialize the parameters for training
+function Recurrent:init(batch_size)
+ if self.ltp_hh.trans:ncol() ~= self.bp.trans:ncol() or
+ self.ltp_ih.trans:ncol() ~= self.bp.trans:ncol() then
+ nerv.error("mismatching dimensions of ltp and bp")
+ end
+ if self.dim_in[1] ~= self.ltp_ih.trans:nrow() or
+ self.dim_in[2] ~= self.ltp_hh.trans:nrow() then
+ nerv.error("mismatching dimensions of ltp and input")
+ end
+ if (self.dim_out[1] ~= self.bp.trans:ncol()) then
+ nerv.error("mismatching dimensions of bp and output")
+ end
+
+ self.ltp_hh:train_init()
+ self.ltp_ih:train_init()
+ self.bp:train_init()
+end
+
+function Recurrent:batch_resize(batch_size)
+ -- do nothing
+end
+
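+--Update the two linear transforms from the output error and their respective inputs, and the bias from the column-summed error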
+function Recurrent:update(bp_err, input, output)
+ self.ltp_ih:update_by_err_input(bp_err[1], input[1])
+ self.ltp_hh:update_by_err_input(bp_err[1], input[2])
+ self.bp:update_by_gradient(bp_err[1]:colsum())
+end
+
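+--output[1] = input[1] * ltp_ih + input[2] * ltp_hh + bp (bias added row-wise)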
+function Recurrent:propagate(input, output)
+ output[1]:mul(input[1], self.ltp_ih.trans, 1.0, 0.0, 'N', 'N')
+ output[1]:mul(input[2], self.ltp_hh.trans, 1.0, 1.0, 'N', 'N')
+ output[1]:add_row(self.bp.trans, 1.0)
+end
+
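+--Propagate the error to both inputs through the transposed transforms; the error going back along the recurrent (hidden) connection can optionally be clipped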
+function Recurrent:back_propagate(bp_err, next_bp_err, input, output)
+ next_bp_err[1]:mul(bp_err[1], self.ltp_ih.trans, 1.0, 0.0, 'N', 'T')
+ next_bp_err[2]:mul(bp_err[1], self.ltp_hh.trans, 1.0, 0.0, 'N', 'T')
+ --[[
+ for i = 0, next_bp_err[2]:nrow() - 1 do
+ for j = 0, next_bp_err[2]:ncol() - 1 do
+ if (next_bp_err[2][i][j] > 10) then next_bp_err[2][i][j] = 10 end
+ if (next_bp_err[2][i][j] < -10) then next_bp_err[2][i][j] = -10 end
+ end
+ end
+ ]]--
+ if self.clip ~= nil then
+ next_bp_err[2]:clip(-self.clip, self.clip)
+ end
+end
+
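+--Expose the three trainable parameters of this layer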
+function Recurrent:get_params()
+ return nerv.ParamRepo({self.ltp_ih, self.ltp_hh, self.bp})
+end