diff options
-rw-r--r-- | nerv/examples/lmptb/rnn/init.lua | 1 |
-rw-r--r-- | nerv/examples/lmptb/rnn/layers/gate_fff.lua | 9 |
2 files changed, 7 insertions, 3 deletions
diff --git a/nerv/examples/lmptb/rnn/init.lua b/nerv/examples/lmptb/rnn/init.lua
index 1370781..6507582 100644
--- a/nerv/examples/lmptb/rnn/init.lua
+++ b/nerv/examples/lmptb/rnn/init.lua
@@ -43,3 +43,4 @@ end
 
 nerv.include('tnn.lua')
 nerv.include('layersT/softmax_ce_t.lua')
+nerv.include('layers/gate_fff.lua')
diff --git a/nerv/examples/lmptb/rnn/layers/gate_fff.lua b/nerv/examples/lmptb/rnn/layers/gate_fff.lua
index 1010639..751dde1 100644
--- a/nerv/examples/lmptb/rnn/layers/gate_fff.lua
+++ b/nerv/examples/lmptb/rnn/layers/gate_fff.lua
@@ -59,10 +59,13 @@ function GateFFFLayer:back_propagate(bp_err, next_bp_err, input, output)
 end
 
 function GateFFFLayer:update(bp_err, input, output)
-    self.ltp:update_by_err_input(bp_err[1], input[1])
-    self.bp:update_by_gradient(bp_err[1]:colsum())
+    self.err_bakm:sigmoid_grad(bp_err[1], output[1])
+    self.ltp1:update_by_err_input(self.err_bakm, input[1])
+    self.ltp2:update_by_err_input(self.err_bakm, input[2])
+    self.ltp3:update_by_err_input(self.err_bakm, input[3])
+    self.bp:update_by_gradient(self.err_bakm:colsum())
 end
 
 function GateFFFLayer:get_params()
-    return nerv.ParamRepo({self.ltp, self.bp})
+    return nerv.ParamRepo({self.ltp1, self.ltp2, self.ltp3, self.bp})
 end