about | summary | refs | log | tree | commit | diff
path: root/nerv/layer/lstm_gate.lua
diff options
context:
space:
mode:
author: Ted Yin <Determinant@users.noreply.github.com> 2016-03-29 11:10:20 +0800
committer: Ted Yin <Determinant@users.noreply.github.com> 2016-03-29 11:10:20 +0800
commit 9ca106d32bd09c25ce90109714be9ef62397550c (patch)
tree e1bb1be0e55a9eb281664238395c77cd071f6d18 /nerv/layer/lstm_gate.lua
parent 86dbfcfd490ce3f8fd4591b0950fbea7f1826c70 (diff)
parent c589c3aabaae7f3867bdfed994c8179a87f42675 (diff)
Merge pull request #35 from liuq901/master (tag: alpha-3.2)
fix bug of momentum & update mse layer
Diffstat (limited to 'nerv/layer/lstm_gate.lua')
-rw-r--r--  nerv/layer/lstm_gate.lua | 9
1 file changed, 5 insertions, 4 deletions
diff --git a/nerv/layer/lstm_gate.lua b/nerv/layer/lstm_gate.lua
index e690721..9d79b04 100644
--- a/nerv/layer/lstm_gate.lua
+++ b/nerv/layer/lstm_gate.lua
@@ -60,18 +60,19 @@ function LSTMGateLayer:back_propagate(bp_err, next_bp_err, input, output)
self.err_bakm:sigmoid_grad(bp_err[1], output[1])
for i = 1, #self.dim_in do
next_bp_err[i]:mul(self.err_bakm, self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T')
+ self["ltp" .. i]:back_propagate_by_err_input(self.err_bakm, input[i])
end
+ self.bp:back_propagate_by_gradient(self.err_bakm:colsum())
end
-function LSTMGateLayer:update(bp_err, input, output)
- self.err_bakm:sigmoid_grad(bp_err[1], output[1])
+function LSTMGateLayer:update()
for i = 1, #self.dim_in do
- self["ltp" .. i]:update_by_err_input(self.err_bakm, input[i])
+ self["ltp" .. i]:update_by_err_input()
if self.param_type[i] == 'D' then
self["ltp" .. i].trans:diagonalize()
end
end
- self.bp:update_by_gradient(self.err_bakm:colsum())
+ self.bp:update_by_gradient()
end
function LSTMGateLayer:get_params()