Diffstat (limited to 'nerv/examples/lmptb/tnn/layersT/lstm_t.lua')
-rw-r--r--  nerv/examples/lmptb/tnn/layersT/lstm_t.lua | 12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/nerv/examples/lmptb/tnn/layersT/lstm_t.lua b/nerv/examples/lmptb/tnn/layersT/lstm_t.lua
index 0bd9c76..ded6058 100644
--- a/nerv/examples/lmptb/tnn/layersT/lstm_t.lua
+++ b/nerv/examples/lmptb/tnn/layersT/lstm_t.lua
@@ -108,16 +108,16 @@ function LSTMLayerT:batch_resize(batch_size, chunk_size)
     self.dagL:batch_resize(batch_size, chunk_size)
 end
 
-function LSTMLayerT:update(bp_err, input, output)
-    self.dagL:update(bp_err, input, output)
+function LSTMLayerT:update(bp_err, input, output, t)
+    self.dagL:update(bp_err, input, output, t)
 end
 
-function LSTMLayerT:propagate(input, output)
-    self.dagL:propagate(input, output)
+function LSTMLayerT:propagate(input, output, t)
+    self.dagL:propagate(input, output, t)
 end
 
-function LSTMLayerT:back_propagate(bp_err, next_bp_err, input, output)
-    self.dagL:back_propagate(bp_err, next_bp_err, input, output)
+function LSTMLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
+    self.dagL:back_propagate(bp_err, next_bp_err, input, output, t)
 end
 
 function LSTMLayerT:get_params()
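
The change above threads a time-step index t through LSTMLayerT:update, LSTMLayerT:propagate, and LSTMLayerT:back_propagate, so each call is forwarded to the inner DAG layer (self.dagL) together with the step it belongs to. A minimal usage sketch, assuming a caller that unrolls the network over chunk_size time steps and prepares per-step input/output matrices; the names chunk_size, inputs, outputs, bp_err, next_bp_err, and lstm_layer are illustrative and not part of the nerv API:

-- Hypothetical driver loop (illustrative only): run one time step at a
-- time, passing the index t that the patched methods now expect.
for t = 1, chunk_size do
    -- forward pass for step t
    lstm_layer:propagate(inputs[t], outputs[t], t)
end
-- the backward pass would forward t in the same way, e.g.:
-- lstm_layer:back_propagate(bp_err[t], next_bp_err[t], inputs[t], outputs[t], t)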