Diffstat (limited to 'nerv/layer/lstm_gate.lua')
-rw-r--r-- | nerv/layer/lstm_gate.lua | 77 |
1 file changed, 77 insertions, 0 deletions
diff --git a/nerv/layer/lstm_gate.lua b/nerv/layer/lstm_gate.lua
new file mode 100644
index 0000000..1963eba
--- /dev/null
+++ b/nerv/layer/lstm_gate.lua
@@ -0,0 +1,77 @@
+local LSTMGateLayer = nerv.class('nerv.LSTMGateLayer', 'nerv.Layer')
+-- NOTE: this is a full matrix gate
+
+function LSTMGateLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+
+    for i = 1, #self.dim_in do
+        self["ltp" .. i] = self:find_param("ltp" .. i, layer_conf, global_conf,
+                                           nerv.LinearTransParam,
+                                           {self.dim_in[i], self.dim_out[1]})
+    end
+    self.bp = self:find_param("bp", layer_conf, global_conf,
+                              nerv.BiasParam, {1, self.dim_out[1]})
+
+    self:check_dim_len(-1, 1) -- accept multiple inputs
+end
+
+function LSTMGateLayer:init(batch_size)
+    for i = 1, #self.dim_in do
+        if self["ltp" .. i].trans:ncol() ~= self.bp.trans:ncol() then
+            nerv.error("mismatching dimensions of linear transform and bias parameter")
+        end
+        if self.dim_in[i] ~= self["ltp" .. i].trans:nrow() then
+            nerv.error("mismatching dimensions of linear transform parameter and input")
+        end
+        self["ltp" .. i]:train_init()
+    end
+
+    if self.dim_out[1] ~= self.ltp1.trans:ncol() then
+        nerv.error("mismatching dimensions of linear transform parameter and output")
+    end
+    self.bp:train_init()
+    self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+end
+
+function LSTMGateLayer:batch_resize(batch_size)
+    if self.err_bakm:nrow() ~= batch_size then
+        self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+    end
+end
+
+function LSTMGateLayer:propagate(input, output)
+    -- apply linear transform
+    output[1]:mul(input[1], self.ltp1.trans, 1.0, 0.0, 'N', 'N')
+    for i = 2, #self.dim_in do
+        output[1]:mul(input[i], self["ltp" .. i].trans, 1.0, 1.0, 'N', 'N')
+    end
+    -- add bias
+    output[1]:add_row(self.bp.trans, 1.0)
+    output[1]:sigmoid(output[1])
+end
+
+function LSTMGateLayer:back_propagate(bp_err, next_bp_err, input, output)
+    self.err_bakm:sigmoid_grad(bp_err[1], output[1])
+    for i = 1, #self.dim_in do
+        next_bp_err[i]:mul(self.err_bakm, self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T')
+    end
+end
+
+function LSTMGateLayer:update(bp_err, input, output)
+    self.err_bakm:sigmoid_grad(bp_err[1], output[1])
+    for i = 1, #self.dim_in do
+        self["ltp" .. i]:update_by_err_input(self.err_bakm, input[i])
+    end
+    self.bp:update_by_gradient(self.err_bakm:colsum())
+end
+
+function LSTMGateLayer:get_params()
+    local pr = nerv.ParamRepo({self.bp})
+    for i = 1, #self.dim_in do
+        pr:add(self["ltp" .. i].id, self["ltp" .. i])
+    end
+    return pr
+end
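The forward pass above computes a standard full-matrix gate, output = sigmoid(input1 * ltp1 + ... + inputN * ltpN + bp), with one linear transform per input stream; back_propagate and update each re-apply sigmoid_grad because bp_err arrives as the error w.r.t. the gate output. A minimal usage sketch follows; the global_conf fields, layer id, and dimensions are illustrative assumptions, not part of this commit, and parameter creation depends on how find_param resolves the given confs:

-- Minimal sketch: a gate mixing x_t (dim 429, assumed) and h_{t-1}
-- (dim 1024, assumed) into one 1024-dim sigmoid activation.
-- Only nerv.LSTMGateLayer itself comes from this commit.
local gconf = {cumat_type = nerv.CuMatrixFloat, -- backend used for err_bakm
               lrate = 0.1, momentum = 0.9}     -- illustrative training settings
local gate = nerv.LSTMGateLayer("lstm_gate_i", gconf,
                                {dim_in = {429, 1024}, -- multiple inputs accepted
                                 dim_out = {1024}})    -- single gate output
gate:init(256) -- batch size 256; allocates the err_bakm scratch matrix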