Diffstat (limited to 'nerv/examples/lmptb/tnn/layersT/lstm.lua')
-rw-r--r--  nerv/examples/lmptb/tnn/layersT/lstm.lua  66
1 file changed, 66 insertions, 0 deletions
diff --git a/nerv/examples/lmptb/tnn/layersT/lstm.lua b/nerv/examples/lmptb/tnn/layersT/lstm.lua
new file mode 100644
index 0000000..0da1f38
--- /dev/null
+++ b/nerv/examples/lmptb/tnn/layersT/lstm.lua
@@ -0,0 +1,66 @@
+local LSTMLayerT = nerv.class('nerv.LSTMLayerT', 'nerv.LayerT')
+
+function LSTMLayerT:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    --the linear transform and bias parameters are assumed to be passed in
+    --through layer_conf, as for nerv.AffineLayer; init() below relies on them
+    self.ltp = layer_conf.ltp
+    self.bp = layer_conf.bp
+
+    --prepare a DAGLayerT to hold the lstm structure
+    --NOTE: the DAG wiring is still a stub; recurrentLconfig is a placeholder
+    --(dimensions only) until the full LSTM graph is built
+    local paramRepo = nerv.ParamRepo()
+    local recurrentLconfig = {{}, {["dim_in"] = {self.dim_in[1], self.dim_out[1]},
+                                   ["dim_out"] = {self.dim_out[1]}}}
+    local layers = {
+        ["nerv.IndRecurrentLayer"] = {
+            ["recurrentL1"] = recurrentLconfig,
+        },
+    }
+
+    self:check_dim_len(1, 1) -- exactly one input and one output
+end
+
+function LSTMLayerT:init(batch_size)
+    if self.ltp.trans:ncol() ~= self.bp.trans:ncol() then
+        nerv.error("mismatching dimensions of linear transform and bias parameter")
+    end
+    if self.dim_in[1] ~= self.ltp.trans:nrow() then
+        nerv.error("mismatching dimensions of linear transform parameter and input")
+    end
+    if self.dim_out[1] ~= self.ltp.trans:ncol() then
+        nerv.error("mismatching dimensions of linear transform parameter and output")
+    end
+    self.ltp_grad = self.ltp.trans:create() --buffer for the linear transform gradient
+    self.ltp:train_init()
+    self.bp:train_init()
+end
+
+function LSTMLayerT:batch_resize(batch_size)
+    -- do nothing
+end
+
+function LSTMLayerT:update(bp_err, input, output)
+    self.ltp:update_by_err_input(bp_err[1], input[1])
+    self.bp:update_by_gradient(bp_err[1]:colsum())
+end
+
+function LSTMLayerT:propagate(input, output)
+    -- apply the linear transform: output = input * W
+    output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
+    -- add the bias to every row of the output
+    output[1]:add_row(self.bp.trans, 1.0)
+end
+
+function LSTMLayerT:back_propagate(bp_err, next_bp_err, input, output)
+    -- error wrt the input: next_bp_err = bp_err * W^T
+    next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
+end
+
+function LSTMLayerT:get_params()
+    return nerv.ParamRepo({self.ltp, self.bp})
+end
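
A minimal usage sketch (an editor's illustration, not part of the commit above), assuming the standard nerv LayerT calling convention; gconf and the pre-built parameter objects ltp (nerv.LinearTransParam) and bp (nerv.BiasParam) are hypothetical stand-ins:

    -- build a layer mapping a 100-dim input to a 200-dim output
    local lstm = nerv.LSTMLayerT("lstmL1", gconf,
                                 {dim_in = {100}, dim_out = {200},
                                  ltp = ltp, bp = bp})
    lstm:init(batch_size)          -- runs the dimension checks against ltp/bp
    lstm:propagate(input, output)  -- output[1] = input[1] * ltp.trans + bp.trans
    lstm:back_propagate(bp_err, next_bp_err, input, output)
    lstm:update(bp_err, input, output)

Note that despite the class name, the methods currently implement a plain affine transform taken from nerv.AffineLayer; the DAGLayerT that will hold the actual LSTM cell is declared in __init but not yet wired up.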