author    Determinant <ted.sybil@gmail.com>    2016-04-18 12:38:25 +0800
committer Determinant <ted.sybil@gmail.com>    2016-04-18 12:38:25 +0800
commit    c73636ba680cdb5b57602a1876a75c110b43d426 (patch)
tree      25a36588f066bfb850c5d0a81c5c40f01b8def56 /nerv/layer/lstm_gate.lua
parent    b15dbc05b08008327b88a62d7f90ef9250182e9b (diff)
support SeqBuffer decoding; fix bugs in finding params (tag: alpha-4)
Diffstat (limited to 'nerv/layer/lstm_gate.lua')
-rw-r--r--    nerv/layer/lstm_gate.lua    4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/nerv/layer/lstm_gate.lua b/nerv/layer/lstm_gate.lua
index e3b11b4..a3ae797 100644
--- a/nerv/layer/lstm_gate.lua
+++ b/nerv/layer/lstm_gate.lua
@@ -37,12 +37,12 @@ function LSTMGateLayer:init(batch_size)
         nerv.error("mismatching dimensions of linear transform parameter and output")
     end
     self.bp:train_init()
-    self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+    self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
 end
 
 function LSTMGateLayer:batch_resize(batch_size)
     if self.err_m:nrow() ~= batch_size then
-        self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+        self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
     end
 end
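
The change allocates err_bakm through self.mat_type instead of the hard-coded self.gconf.cumat_type, so the backup error buffer is created with whichever matrix backend the layer is configured for (host or CUDA) rather than always being a CUDA matrix, presumably in support of the SeqBuffer decoding mentioned in the commit message. Below is a minimal standalone Lua sketch of that pattern, not code from this repository; the configuration field names use_cpu, mmat_type and cumat_type are assumptions for illustration only.

-- Sketch: pick one matrix constructor up front and allocate every buffer
-- through it, so CPU and GPU configurations share the same allocation code.
-- (use_cpu / mmat_type / cumat_type are assumed names, not from this diff.)
local gconf = {
    use_cpu    = true,
    mmat_type  = function (r, c) return ("host matrix %dx%d"):format(r, c) end,
    cumat_type = function (r, c) return ("CUDA matrix %dx%d"):format(r, c) end,
}
local mat_type = gconf.use_cpu and gconf.mmat_type or gconf.cumat_type
-- err_bakm now follows the configured backend instead of being hard-wired to CUDA
local err_bakm = mat_type(4, 3)
print(err_bakm)  --> host matrix 4x3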