about summary refs log tree commit diff
path: root/nerv/layer/rnn.lua
diff options
context:
space:
mode:
author    Qi Liu <liuq901@163.com>    2016-05-24 12:28:49 +0800
committer Qi Liu <liuq901@163.com>    2016-05-24 12:28:49 +0800
commit d58b7adf0acd68921ec2d38d5929bf68406d4982 (patch)
tree   ea394d86ee95cdf2c10659cc0223943af28777bd /nerv/layer/rnn.lua
parent c0fdb7ee2966546023410bb03e62dee0cf64e0e1 (diff)
parent 60b3932a5a50f9ea00d6494340b4e3ff41e8fd6f (diff)
Merge branch 'master' into 'master' beta-1.21
change RNN/LSTM/LSTMP parameter order, which is compatible to old version See merge request !5
Diffstat (limited to 'nerv/layer/rnn.lua')
-rw-r--r-- nerv/layer/rnn.lua | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index 63e0b55..8d5a07c 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -17,7 +17,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
local layers = {
['nerv.AffineLayer'] = {
- main = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr, activation = layer_conf.activation},
+ main = {dim_in = table.connect(din, {dout}), dim_out = {dout}, pr = pr, activation = layer_conf.activation},
},
['nerv.DuplicateLayer'] = {
duplicate = {dim_in = {dout}, dim_out = {dout, dout}},
@@ -26,11 +26,11 @@ function RNNLayer:__init(id, global_conf, layer_conf)
local connections = {
{'main[1]', 'duplicate[1]', 0},
- {'duplicate[1]', 'main[1]', 1},
+ {'duplicate[1]', 'main[' .. (#din + 1) .. ']', 1},
{'duplicate[2]', '<output>[1]', 0},
}
for i = 1, #din do
- table.insert(connections, {'<input>[' .. i .. ']', 'main[' .. (i + 1) .. ']', 0})
+ table.insert(connections, {'<input>[' .. i .. ']', 'main[' .. i .. ']', 0})
end
self:add_prefix(layers, connections)