Diffstat (limited to 'nerv/layer/lstm_gate.lua')
-rw-r--r--  nerv/layer/lstm_gate.lua | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/nerv/layer/lstm_gate.lua b/nerv/layer/lstm_gate.lua
index e690721..9d79b04 100644
--- a/nerv/layer/lstm_gate.lua
+++ b/nerv/layer/lstm_gate.lua
@@ -60,18 +60,19 @@ function LSTMGateLayer:back_propagate(bp_err, next_bp_err, input, output)
     self.err_bakm:sigmoid_grad(bp_err[1], output[1])
     for i = 1, #self.dim_in do
         next_bp_err[i]:mul(self.err_bakm, self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T')
+        self["ltp" .. i]:back_propagate_by_err_input(self.err_bakm, input[i])
     end
+    self.bp:back_propagate_by_gradient(self.err_bakm:colsum())
 end

-function LSTMGateLayer:update(bp_err, input, output)
-    self.err_bakm:sigmoid_grad(bp_err[1], output[1])
+function LSTMGateLayer:update()
     for i = 1, #self.dim_in do
-        self["ltp" .. i]:update_by_err_input(self.err_bakm, input[i])
+        self["ltp" .. i]:update_by_err_input()
         if self.param_type[i] == 'D' then
             self["ltp" .. i].trans:diagonalize()
         end
     end
-    self.bp:update_by_gradient(self.err_bakm:colsum())
+    self.bp:update_by_gradient()
 end

 function LSTMGateLayer:get_params()
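The commit moves gradient computation out of update() and into back_propagate(): back_propagate_by_err_input() / back_propagate_by_gradient() are presumably expected to cache a gradient on the parameter object during the backward pass, which the now argument-less update_by_err_input() / update_by_gradient() apply afterwards. Below is a minimal, self-contained Lua sketch of that assumed contract, using toy scalar weights and illustrative field names (grad, lrate) rather than NERV's actual matrix internals:

-- Sketch of the assumed parameter contract: the backward pass stores a
-- gradient, and the update pass applies it without needing bp_err/input
-- again. Not NERV's real implementation; all names here are illustrative.
local Param = {}
Param.__index = Param

function Param.new(w, lrate)
    -- `trans` stands in for the weight matrix; `lrate` is a toy learning rate
    return setmetatable({trans = w, lrate = lrate, grad = 0}, Param)
end

-- called from back_propagate(): remember dL/dW for this mini-batch
-- (err * input stands in for the GEMM a real matrix library would do)
function Param:back_propagate_by_err_input(err, input)
    self.grad = err * input
end

-- called from update(): apply the stored gradient, no arguments needed
function Param:update_by_err_input()
    self.trans = self.trans - self.lrate * self.grad
end

-- usage mirroring the refactored layer methods above
local p = Param.new(0.5, 0.1)
p:back_propagate_by_err_input(2.0, 3.0)   -- backward pass stores grad = 6
p:update_by_err_input()                   -- update applies it
print(p.trans)                            --> -0.1  (0.5 - 0.1 * 6)

One consequence of this split is that update() can be called once per mini-batch (or skipped, or batched differently) without re-deriving sigmoid_grad, since the error signal is consumed exactly once, in back_propagate().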