Diffstat (limited to 'nerv/examples/lmptb/tnn/layersT/lstm_t.lua')
-rw-r--r--  nerv/examples/lmptb/tnn/layersT/lstm_t.lua  50
1 file changed, 50 insertions, 0 deletions
diff --git a/nerv/examples/lmptb/tnn/layersT/lstm_t.lua b/nerv/examples/lmptb/tnn/layersT/lstm_t.lua
new file mode 100644
index 0000000..5b653a3
--- /dev/null
+++ b/nerv/examples/lmptb/tnn/layersT/lstm_t.lua
@@ -0,0 +1,50 @@
+local LSTMLayerT = nerv.class('nerv.LSTMLayerT', 'nerv.LayerT')
+
+function LSTMLayerT:__init(id, global_conf, layer_conf)
+    --input1:x input2:h input3:c
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+
+    --prepare a DAGLayerT to hold the lstm structure
+    local paramRepo = nerv.ParamRepo()
+    local layers = {
+        ["nerv.IndRecurrentLayer"] = {
+            ["recurrentL1"] = recurrentLconfig, --placeholder: not yet defined in this commit
+        }}
+
+    self:check_dim_len(1, 1) -- exactly one input and one output
+end
+
+function LSTMLayerT:init(batch_size)
+    if self.ltp.trans:ncol() ~= self.bp.trans:ncol() then
+        nerv.error("mismatching dimensions of linear transform and bias parameter")
+    end
+    if self.dim_in[1] ~= self.ltp.trans:nrow() then
+        nerv.error("mismatching dimensions of linear transform parameter and input")
+    end
+    if self.dim_out[1] ~= self.ltp.trans:ncol() then
+        nerv.error("mismatching dimensions of linear transform parameter and output")
+    end
+    self.ltp_grad = self.ltp.trans:create()
+    self.ltp:train_init()
+    self.bp:train_init()
+end
+
+function LSTMLayerT:batch_resize(batch_size)
+    -- do nothing
+end
+
+function LSTMLayerT:update(bp_err, input, output)
+end
+
+function LSTMLayerT:propagate(input, output)
+end
+
+function LSTMLayerT:back_propagate(bp_err, next_bp_err, input, output)
+end
+
+function LSTMLayerT:get_params()
+    return nerv.ParamRepo({self.ltp, self.bp})
+end
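For context, a minimal sketch of how a layer declared this way would be constructed, assuming the usual NERV conventions for global_conf and layer_conf; the identifiers, dimension values, and gconf fields below are illustrative assumptions, not taken from this commit:

    -- hypothetical usage sketch (all names and values are assumptions)
    local gconf = {lrate = 0.1, wcost = 1e-6, momentum = 0.9,
                   cumat_type = nerv.CuMatrixFloat}
    local layer_conf = {dim_in = {100}, dim_out = {100}}
    local lstm = nerv.LSTMLayerT("lstmL1", gconf, layer_conf)
    -- note: calling lstm:init(batch_size) at this point would fail,
    -- since self.ltp / self.bp are never assigned in this commit and
    -- init()'s dimension checks dereference them.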
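The checks in init() imply specific parameter shapes: ltp.trans must be a dim_in[1] x dim_out[1] linear transform matrix, and bp.trans a bias row whose column count matches ltp.trans:ncol(). A sketch of parameters that would pass those checks, assuming nerv.LinearTransParam, nerv.BiasParam, and gconf.cumat_type behave as in NERV's affine layer; these names are assumptions, not part of this file:

    -- hypothetical parameter setup satisfying init()'s shape checks
    local ltp = nerv.LinearTransParam("ltp", gconf)
    ltp.trans = gconf.cumat_type(100, 100) -- dim_in[1] rows, dim_out[1] cols
    local bp = nerv.BiasParam("bp", gconf)
    bp.trans = gconf.cumat_type(1, 100)    -- ncol matches ltp.trans:ncol()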