author     Determinant <ted.sybil@gmail.com>  2016-03-16 17:53:39 +0800
committer  Determinant <ted.sybil@gmail.com>  2016-03-16 17:53:39 +0800
commit     289ac7f4b6e88b935da5c891e1efcf91fc047403
tree       d4fc3a4fc20f2d5908624b3f6587ecd57966d719 /nerv/layer/rnn.lua
parent     07fc1e2794027d44c255e1062c4491346b101a08
merge seq_buffer and change asr_trainer.lua accordingly
Diffstat (limited to 'nerv/layer/rnn.lua')

 nerv/layer/rnn.lua | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index 0b5ccaa..333be9e 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -20,7 +20,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
         ['nerv.AffineLayer'] = {
             main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
         },
-        [layers.activation] = {
+        [layer_conf.activation] = {
             activation = {dim_in = {dout}, dim_out = {dout}},
         },
         ['nerv.DuplicateLayer'] = {
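The one-line fix replaces a reference to the undefined variable `layers` with `layer_conf`, so the activation layer class is read from the configuration table passed to the constructor rather than from a name that is not in scope. Below is a minimal usage sketch, not part of this commit: the dimensions, the parameter repo, and the surrounding setup are hypothetical, and it assumes nerv's convention that `activation` holds a registered layer class name such as 'nerv.SigmoidLayer'.

    -- Minimal sketch (hypothetical values; not from this commit) showing
    -- how the fixed line is exercised: 'activation' supplies the key used
    -- at [layer_conf.activation] inside RNNLayer:__init.
    local pr = nerv.ParamRepo()               -- hypothetical parameter repo
    local layer_conf = {
        dim_in = {429},                       -- hypothetical input dim (din)
        dim_out = {1024},                     -- hypothetical hidden dim (dout)
        pr = pr,
        activation = 'nerv.SigmoidLayer',     -- class name used as the table key
    }
    -- global_conf is assumed to be defined by the surrounding training script
    local rnn = nerv.RNNLayer('rnn1', global_conf, layer_conf)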