author     Qi Liu <[email protected]>    2016-05-24 12:20:10 +0800
committer  Qi Liu <[email protected]>    2016-05-24 12:20:10 +0800
commit     60b3932a5a50f9ea00d6494340b4e3ff41e8fd6f (patch)
tree       ea394d86ee95cdf2c10659cc0223943af28777bd /nerv
parent     c0fdb7ee2966546023410bb03e62dee0cf64e0e1 (diff)
change RNN/LSTM/LSTMP parameter order, which is compatible with the old version
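Before this commit, each recurrent affine layer listed its recurrent dimensions first and the #din external inputs last (table.connect({dout, ...}, din)); after it, the external inputs come first and the recurrent terms move to the tail (table.connect(din, {dout, ...})), so the stored parameter order matches the older single-input layout. Below is a minimal sketch of what the two orders produce, assuming table.connect concatenates two array-like tables and table.vector(n, v) builds n copies of v; these semantics are inferred from how the diff uses the helpers, whose actual definitions live elsewhere in nerv:

    -- Assumed semantics, inferred from usage in the diff below.
    function table.connect(a, b)
        local r = {}
        for _, v in ipairs(a) do r[#r + 1] = v end
        for _, v in ipairs(b) do r[#r + 1] = v end
        return r
    end

    function table.vector(n, v)
        local r = {}
        for i = 1, n do r[i] = v end
        return r
    end

    local din, dout = {620, 620}, 1024   -- hypothetical dimensions
    -- old order: recurrent dims first, external inputs last
    print(table.concat(table.connect({dout, dout}, din), ', '))  --> 1024, 1024, 620, 620
    -- new order: external inputs first, recurrent dims last
    print(table.concat(table.connect(din, {dout, dout}), ', '))  --> 620, 620, 1024, 1024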
Diffstat (limited to 'nerv')
-rw-r--r--  nerv/layer/lstm.lua   44
-rw-r--r--  nerv/layer/lstmp.lua  44
-rw-r--r--  nerv/layer/rnn.lua     6
3 files changed, 47 insertions, 47 deletions
diff --git a/nerv/layer/lstm.lua b/nerv/layer/lstm.lua
index 5d73ad2..d6437e5 100644
--- a/nerv/layer/lstm.lua
+++ b/nerv/layer/lstm.lua
@@ -24,13 +24,13 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
             cellDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
         },
         ['nerv.AffineLayer'] = {
-            mainAffine = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
-            forgetGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
-            inputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
-            outputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+            mainAffine = {dim_in = table.connect(din, {dout}), dim_out = {dout}, pr = pr},
+            forgetGate = {dim_in = table.connect(din, {dout, dout}), dim_out = {dout},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
+            inputGate = {dim_in = table.connect(din, {dout, dout}), dim_out = {dout},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
+            outputGate = {dim_in = table.connect(din, {dout, dout}), dim_out = {dout},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
         },
         ['nerv.TanhLayer'] = {
             mainTanh = {dim_in = {dout}, dim_out = {dout}},
@@ -51,20 +51,20 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
         --{'<input>[1 .. n]', 'inputDup(1 .. n)[1]', 0},

         -- input gate
-        {'outputDup[1]', 'inputGate[1]', 1},
-        {'cellDup[1]', 'inputGate[2]', 1},
-        --{'inputDup(1 .. n)[1]', 'inputGate[3 .. n + 2]', 0},
+        {'outputDup[1]', 'inputGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[1]', 'inputGate[' .. (#din + 2) .. ']', 1},
+        --{'inputDup(1 .. n)[1]', 'inputGate[1 .. n]', 0},

         -- forget gate
-        {'outputDup[2]', 'forgetGate[1]', 1},
-        {'cellDup[2]', 'forgetGate[2]', 1},
-        --{'inputDup(1 .. n)[2]', 'forgetGate[3 .. n + 2]', 0},
+        {'outputDup[2]', 'forgetGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[2]', 'forgetGate[' .. (#din + 2) .. ']', 1},
+        --{'inputDup(1 .. n)[2]', 'forgetGate[1 .. n]', 0},

         -- lstm cell
         {'forgetGate[1]', 'forgetGateMul[1]', 0},
         {'cellDup[3]', 'forgetGateMul[2]', 1},
-        {'outputDup[3]', 'mainAffine[1]', 1},
-        --{'inputDup(1 .. n)[3]', 'mainAffine[2 .. n + 1]', 0},
+        {'outputDup[3]', 'mainAffine[' .. (#din + 1) .. ']', 1},
+        --{'inputDup(1 .. n)[3]', 'mainAffine[1 .. n]', 0},
         {'mainAffine[1]', 'mainTanh[1]', 0},
         {'inputGate[1]', 'inputGateMul[1]', 0},
         {'mainTanh[1]', 'inputGateMul[2]', 0},
@@ -73,9 +73,9 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
         {'mainCombine[1]', 'cellDup[1]', 0},

         -- forget gate
-        {'outputDup[4]', 'outputGate[1]', 1},
-        {'cellDup[4]', 'outputGate[2]', 0},
-        --{'inputDup(1 .. n)[4]', 'outputGate[2 .. n + 1]', 0},
+        {'outputDup[4]', 'outputGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[4]', 'outputGate[' .. (#din + 2) .. ']', 0},
+        --{'inputDup(1 .. n)[4]', 'outputGate[1 .. n]', 0},

         -- lstm output
         {'cellDup[5]', 'outputTanh[1]', 0},
@@ -86,10 +86,10 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
     }
     for i = 1, #din do
         table.insert(connections, {'<input>[' .. i .. ']', 'inputDup' .. i .. '[1]', 0})
-        table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. (i + 2) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. (i + 2) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. (i + 1) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. (i + 2) .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. i .. ']', 0})
     end

     self:add_prefix(layers, connections)
diff --git a/nerv/layer/lstmp.lua b/nerv/layer/lstmp.lua
index 49c9516..c16dbd4 100644
--- a/nerv/layer/lstmp.lua
+++ b/nerv/layer/lstmp.lua
@@ -25,13 +25,13 @@ function LSTMPLayer:__init(id, global_conf, layer_conf)
             cellDup = {dim_in = {dcell}, dim_out = {dcell, dcell, dcell, dcell, dcell}},
         },
         ['nerv.AffineLayer'] = {
-            mainAffine = {dim_in = table.connect({dout}, din), dim_out = {dcell}, pr = pr},
-            forgetGate = {dim_in = table.connect({dout, dcell}, din), dim_out = {dcell},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
-            inputGate = {dim_in = table.connect({dout, dcell}, din), dim_out = {dcell},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
-            outputGate = {dim_in = table.connect({dout, dcell}, din), dim_out = {dcell},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+            mainAffine = {dim_in = table.connect(din, {dout}), dim_out = {dcell}, pr = pr},
+            forgetGate = {dim_in = table.connect(din, {dout, dcell}), dim_out = {dcell},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
+            inputGate = {dim_in = table.connect(din, {dout, dcell}), dim_out = {dcell},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
+            outputGate = {dim_in = table.connect(din, {dout, dcell}), dim_out = {dcell},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
             projection = {dim_in = {dcell}, dim_out = {dout}, pr = pr, no_bias = true},
         },
         ['nerv.TanhLayer'] = {
@@ -53,20 +53,20 @@ function LSTMPLayer:__init(id, global_conf, layer_conf)
         --{'<input>[1 .. n]', 'inputDup(1 .. n)[1]', 0},

         -- input gate
-        {'outputDup[1]', 'inputGate[1]', 1},
-        {'cellDup[1]', 'inputGate[2]', 1},
-        --{'inputDup(1 .. n)[1]', 'inputGate[3 .. n + 2]', 0},
+        {'outputDup[1]', 'inputGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[1]', 'inputGate[' .. (#din + 2) .. ']', 1},
+        --{'inputDup(1 .. n)[1]', 'inputGate[1 .. n]', 0},

         -- forget gate
-        {'outputDup[2]', 'forgetGate[1]', 1},
-        {'cellDup[2]', 'forgetGate[2]', 1},
-        --{'inputDup(1 .. n)[2]', 'forgetGate[3 .. n + 2]', 0},
+        {'outputDup[2]', 'forgetGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[2]', 'forgetGate[' .. (#din + 2) .. ']', 1},
+        --{'inputDup(1 .. n)[2]', 'forgetGate[1 .. n]', 0},

         -- lstm cell
         {'forgetGate[1]', 'forgetGateMul[1]', 0},
         {'cellDup[3]', 'forgetGateMul[2]', 1},
-        {'outputDup[3]', 'mainAffine[1]', 1},
-        --{'inputDup(1 .. n)[3]', 'mainAffine[2 .. n + 1]', 0},
+        {'outputDup[3]', 'mainAffine[' .. (#din + 1) .. ']', 1},
+        --{'inputDup(1 .. n)[3]', 'mainAffine[1 .. n]', 0},
         {'mainAffine[1]', 'mainTanh[1]', 0},
         {'inputGate[1]', 'inputGateMul[1]', 0},
         {'mainTanh[1]', 'inputGateMul[2]', 0},
@@ -75,9 +75,9 @@ function LSTMPLayer:__init(id, global_conf, layer_conf)
         {'mainCombine[1]', 'cellDup[1]', 0},

         -- forget gate
-        {'outputDup[4]', 'outputGate[1]', 1},
-        {'cellDup[4]', 'outputGate[2]', 0},
-        --{'inputDup(1 .. n)[4]', 'outputGate[2 .. n + 1]', 0},
+        {'outputDup[4]', 'outputGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[4]', 'outputGate[' .. (#din + 2) .. ']', 0},
+        --{'inputDup(1 .. n)[4]', 'outputGate[1 .. n]', 0},

         -- lstm output
         {'cellDup[5]', 'outputTanh[1]', 0},
@@ -89,10 +89,10 @@ function LSTMPLayer:__init(id, global_conf, layer_conf)
     }
     for i = 1, #din do
         table.insert(connections, {'<input>[' .. i .. ']', 'inputDup' .. i .. '[1]', 0})
-        table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. (i + 2) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. (i + 2) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. (i + 1) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. (i + 2) .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. i .. ']', 0})
     end

     self:add_prefix(layers, connections)
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index 63e0b55..8d5a07c 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -17,7 +17,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)

     local layers = {
         ['nerv.AffineLayer'] = {
-            main = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr, activation = layer_conf.activation},
+            main = {dim_in = table.connect(din, {dout}), dim_out = {dout}, pr = pr, activation = layer_conf.activation},
         },
         ['nerv.DuplicateLayer'] = {
             duplicate = {dim_in = {dout}, dim_out = {dout, dout}},
@@ -26,11 +26,11 @@ function RNNLayer:__init(id, global_conf, layer_conf)

     local connections = {
         {'main[1]', 'duplicate[1]', 0},
-        {'duplicate[1]', 'main[1]', 1},
+        {'duplicate[1]', 'main[' .. (#din + 1) .. ']', 1},
         {'duplicate[2]', '<output>[1]', 0},
     }
     for i = 1, #din do
-        table.insert(connections, {'<input>[' .. i .. ']', 'main[' .. (i + 1) .. ']', 0})
+        table.insert(connections, {'<input>[' .. i .. ']', 'main[' .. i .. ']', 0})
     end

     self:add_prefix(layers, connections)
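With the external inputs moved to the front, every hard-coded port index in the connection lists shifts accordingly: input i now feeds gate port i, while the recurrent output and cell taps land at ports #din + 1 and #din + 2. A quick check of the string arithmetic from the diff, using a hypothetical two-input configuration (the dimensions are made up):

    local din = {620, 620}   -- hypothetical: two external inputs
    for i = 1, #din do
        -- was 'inputGate[' .. (i + 2) .. ']' (ports 3, 4); now ports 1, 2
        print('inputDup' .. i .. '[1] -> inputGate[' .. i .. ']')
    end
    -- recurrent taps moved from ports 1 and 2 to the tail
    print('outputDup[1] -> inputGate[' .. (#din + 1) .. ']')   --> inputGate[3]
    print('cellDup[1]   -> inputGate[' .. (#din + 2) .. ']')   --> inputGate[4]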