From 89a3fa93d571f446bcd1fa69ddd35257d975c239 Mon Sep 17 00:00:00 2001
From: Qi Liu
Date: Mon, 9 May 2016 17:57:30 +0800
Subject: merge affine & lstm_gate & projection layer

---
 nerv/layer/lstm.lua | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

(limited to 'nerv/layer/lstm.lua')

diff --git a/nerv/layer/lstm.lua b/nerv/layer/lstm.lua
index 3de3453..e568ee8 100644
--- a/nerv/layer/lstm.lua
+++ b/nerv/layer/lstm.lua
@@ -23,16 +23,14 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
         },
         ['nerv.AffineLayer'] = {
             mainAffine = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
+            forgetGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
+            inputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
+            outputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
         },
         ['nerv.TanhLayer'] = {
             mainTanh = {dim_in = {dout}, dim_out = {dout}},
             outputTanh = {dim_in = {dout}, dim_out = {dout}},
         },
-        ['nerv.LSTMGateLayer'] = {
-            forgetGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
-            inputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
-            outputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
-        },
         ['nerv.ElemMulLayer'] = {
             inputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
             forgetGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
--
cgit v1.2.3
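With this commit each LSTM gate is an ordinary nerv.AffineLayer that carries its own sigmoid (activation = nerv.SigmoidLayer), so the separate nerv.LSTMGateLayer class is no longer needed. The param_type 'D' on the third (cell-state) connection is taken here to be a diagonal, peephole-style weight. A rough pure-Lua sketch of what one merged gate computes; the helper and parameter names are illustrative, not nerv API:

local function sigmoid(z)
    return 1 / (1 + math.exp(-z))
end

-- x: layer input (din), h: previous output (dout), c: previous cell (dout)
-- W_x, W_h: dense ('N') weight matrices; w_c: diagonal ('D') peephole
-- weights, one scalar per cell unit; b: bias
local function gate(x, h, c, W_x, W_h, w_c, b)
    local g = {}
    for j = 1, #b do
        local z = b[j] + w_c[j] * c[j]                  -- 'D': diagonal term
        for i = 1, #x do z = z + x[i] * W_x[i][j] end   -- 'N': full matrix
        for i = 1, #h do z = z + h[i] * W_h[i][j] end   -- 'N': full matrix
        g[j] = sigmoid(z)                               -- built-in activation
    end
    return g
end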
From 10916f721a945a5edd052ab93027413fd3c01f65 Mon Sep 17 00:00:00 2001
From: Qi Liu
Date: Thu, 12 May 2016 11:49:44 +0800
Subject: lstm & lstmp layers support multiple inputs

---
 nerv/layer/lstm.lua | 53 ++++++++++++++++++++++++++++++++++-------------------
 1 file changed, 34 insertions(+), 19 deletions(-)

(limited to 'nerv/layer/lstm.lua')

diff --git a/nerv/layer/lstm.lua b/nerv/layer/lstm.lua
index e568ee8..5d73ad2 100644
--- a/nerv/layer/lstm.lua
+++ b/nerv/layer/lstm.lua
@@ -2,9 +2,12 @@ local LSTMLayer = nerv.class('nerv.LSTMLayer', 'nerv.GraphLayer')
 
 function LSTMLayer:__init(id, global_conf, layer_conf)
     nerv.Layer.__init(self, id, global_conf, layer_conf)
-    self:check_dim_len(1, 1)
+    self:check_dim_len(-1, 1)
+    if #self.dim_in == 0 then
+        nerv.error('LSTM layer %s has no input', self.id)
+    end
 
-    local din = layer_conf.dim_in[1]
+    local din = layer_conf.dim_in
     local dout = layer_conf.dim_out[1]
 
     local pr = layer_conf.pr
@@ -17,15 +20,17 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
             mainCombine = {dim_in = {dout, dout}, dim_out = {dout}, lambda = {1, 1}},
         },
         ['nerv.DuplicateLayer'] = {
-            inputDup = {dim_in = {din}, dim_out = {din, din, din, din}},
             outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
             cellDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
         },
         ['nerv.AffineLayer'] = {
-            mainAffine = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
-            forgetGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
-            inputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
-            outputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
+            mainAffine = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
+            forgetGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
+                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+            inputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
+                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+            outputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
+                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
         },
         ['nerv.TanhLayer'] = {
             mainTanh = {dim_in = {dout}, dim_out = {dout}},
@@ -37,26 +42,29 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
             outputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
         },
     }
+    for i = 1, #din do
+        layers['nerv.DuplicateLayer']['inputDup' .. i] = {dim_in = {din[i]}, dim_out = {din[i], din[i], din[i], din[i]}}
+    end
 
     local connections = {
         -- lstm input
-        {'[1]', 'inputDup[1]', 0},
+        --{'[1 .. n]', 'inputDup(1 .. n)[1]', 0},
 
         -- input gate
-        {'inputDup[1]', 'inputGate[1]', 0},
-        {'outputDup[1]', 'inputGate[2]', 1},
-        {'cellDup[1]', 'inputGate[3]', 1},
+        {'outputDup[1]', 'inputGate[1]', 1},
+        {'cellDup[1]', 'inputGate[2]', 1},
+        --{'inputDup(1 .. n)[1]', 'inputGate[3 .. n + 2]', 0},
 
         -- forget gate
-        {'inputDup[2]', 'forgetGate[1]', 0},
-        {'outputDup[2]', 'forgetGate[2]', 1},
-        {'cellDup[2]', 'forgetGate[3]', 1},
+        {'outputDup[2]', 'forgetGate[1]', 1},
+        {'cellDup[2]', 'forgetGate[2]', 1},
+        --{'inputDup(1 .. n)[2]', 'forgetGate[3 .. n + 2]', 0},
 
         -- lstm cell
         {'forgetGate[1]', 'forgetGateMul[1]', 0},
         {'cellDup[3]', 'forgetGateMul[2]', 1},
-        {'inputDup[3]', 'mainAffine[1]', 0},
-        {'outputDup[3]', 'mainAffine[2]', 1},
+        {'outputDup[3]', 'mainAffine[1]', 1},
+        --{'inputDup(1 .. n)[3]', 'mainAffine[2 .. n + 1]', 0},
         {'mainAffine[1]', 'mainTanh[1]', 0},
         {'inputGate[1]', 'inputGateMul[1]', 0},
         {'mainTanh[1]', 'inputGateMul[2]', 0},
@@ -65,9 +73,9 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
         {'mainCombine[1]', 'cellDup[1]', 0},
 
         -- output gate
-        {'inputDup[4]', 'outputGate[1]', 0},
-        {'outputDup[4]', 'outputGate[2]', 1},
-        {'cellDup[4]', 'outputGate[3]', 0},
+        {'outputDup[4]', 'outputGate[1]', 1},
+        {'cellDup[4]', 'outputGate[2]', 0},
+        --{'inputDup(1 .. n)[4]', 'outputGate[3 .. n + 2]', 0},
 
         -- lstm output
         {'cellDup[5]', 'outputTanh[1]', 0},
@@ -76,6 +84,13 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
         {'outputGateMul[1]', 'outputDup[1]', 0},
         {'outputDup[5]', '[1]', 0},
     }
+    for i = 1, #din do
+        table.insert(connections, {'[' .. i .. ']', 'inputDup' .. i .. '[1]', 0})
+        table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. (i + 2) .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. (i + 2) .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. (i + 1) .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. (i + 2) .. ']', 0})
+    end
 
     self:add_prefix(layers, connections)
     local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
--
cgit v1.2.3
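The variable-width configuration above relies on two array helpers used by nerv, table.connect (concatenate two arrays) and table.vector (an array of n copies of a value). A minimal sketch of their assumed behavior, together with what a gate's configuration evaluates to for two hypothetical input dimensions:

-- Stand-ins for nerv's table extensions; semantics assumed from usage.
local function connect(a, b)
    local r = {}
    for _, v in ipairs(a) do table.insert(r, v) end
    for _, v in ipairs(b) do table.insert(r, v) end
    return r
end

local function vector(n, v)
    local r = {}
    for i = 1, n do r[i] = v end
    return r
end

-- Hypothetical sizes: two inputs of 256 and 128 dims, 512 hidden units.
local din, dout = {256, 128}, 512
local dim_in = connect({dout, dout}, din)
-- dim_in     -> {512, 512, 256, 128}: outputDup, cellDup, then the inputs
local param_type = connect({'N', 'D'}, vector(#din, 'N'))
-- param_type -> {'N', 'D', 'N', 'N'}: only the cell connection is diagonal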
From 60b3932a5a50f9ea00d6494340b4e3ff41e8fd6f Mon Sep 17 00:00:00 2001
From: Qi Liu
Date: Tue, 24 May 2016 12:20:10 +0800
Subject: change RNN/LSTM/LSTMP parameter order to be compatible with the old version

---
 nerv/layer/lstm.lua | 44 ++++++++++++++++++++++----------------------
 1 file changed, 22 insertions(+), 22 deletions(-)

(limited to 'nerv/layer/lstm.lua')

diff --git a/nerv/layer/lstm.lua b/nerv/layer/lstm.lua
index 5d73ad2..d6437e5 100644
--- a/nerv/layer/lstm.lua
+++ b/nerv/layer/lstm.lua
@@ -24,13 +24,13 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
             cellDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
         },
         ['nerv.AffineLayer'] = {
-            mainAffine = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
-            forgetGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
-            inputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
-            outputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
-                param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+            mainAffine = {dim_in = table.connect(din, {dout}), dim_out = {dout}, pr = pr},
+            forgetGate = {dim_in = table.connect(din, {dout, dout}), dim_out = {dout},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
+            inputGate = {dim_in = table.connect(din, {dout, dout}), dim_out = {dout},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
+            outputGate = {dim_in = table.connect(din, {dout, dout}), dim_out = {dout},
+                param_type = table.connect(table.vector(#din, 'N'), {'N', 'D'}), pr = pr, activation = nerv.SigmoidLayer},
         },
         ['nerv.TanhLayer'] = {
             mainTanh = {dim_in = {dout}, dim_out = {dout}},
@@ -51,20 +51,20 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
         --{'[1 .. n]', 'inputDup(1 .. n)[1]', 0},
 
         -- input gate
-        {'outputDup[1]', 'inputGate[1]', 1},
-        {'cellDup[1]', 'inputGate[2]', 1},
-        --{'inputDup(1 .. n)[1]', 'inputGate[3 .. n + 2]', 0},
+        {'outputDup[1]', 'inputGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[1]', 'inputGate[' .. (#din + 2) .. ']', 1},
+        --{'inputDup(1 .. n)[1]', 'inputGate[1 .. n]', 0},
 
         -- forget gate
-        {'outputDup[2]', 'forgetGate[1]', 1},
-        {'cellDup[2]', 'forgetGate[2]', 1},
-        --{'inputDup(1 .. n)[2]', 'forgetGate[3 .. n + 2]', 0},
+        {'outputDup[2]', 'forgetGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[2]', 'forgetGate[' .. (#din + 2) .. ']', 1},
+        --{'inputDup(1 .. n)[2]', 'forgetGate[1 .. n]', 0},
 
         -- lstm cell
         {'forgetGate[1]', 'forgetGateMul[1]', 0},
         {'cellDup[3]', 'forgetGateMul[2]', 1},
-        {'outputDup[3]', 'mainAffine[1]', 1},
-        --{'inputDup(1 .. n)[3]', 'mainAffine[2 .. n + 1]', 0},
+        {'outputDup[3]', 'mainAffine[' .. (#din + 1) .. ']', 1},
+        --{'inputDup(1 .. n)[3]', 'mainAffine[1 .. n]', 0},
         {'mainAffine[1]', 'mainTanh[1]', 0},
         {'inputGate[1]', 'inputGateMul[1]', 0},
         {'mainTanh[1]', 'inputGateMul[2]', 0},
@@ -73,9 +73,9 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
         {'mainCombine[1]', 'cellDup[1]', 0},
 
         -- output gate
-        {'outputDup[4]', 'outputGate[1]', 1},
-        {'cellDup[4]', 'outputGate[2]', 0},
-        --{'inputDup(1 .. n)[4]', 'outputGate[3 .. n + 2]', 0},
+        {'outputDup[4]', 'outputGate[' .. (#din + 1) .. ']', 1},
+        {'cellDup[4]', 'outputGate[' .. (#din + 2) .. ']', 0},
+        --{'inputDup(1 .. n)[4]', 'outputGate[1 .. n]', 0},
 
         -- lstm output
         {'cellDup[5]', 'outputTanh[1]', 0},
@@ -86,10 +86,10 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
     }
     for i = 1, #din do
         table.insert(connections, {'[' .. i .. ']', 'inputDup' .. i .. '[1]', 0})
-        table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. (i + 2) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. (i + 2) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. (i + 1) .. ']', 0})
-        table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. (i + 2) .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. i .. ']', 0})
+        table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. i .. ']', 0})
     end
 
     self:add_prefix(layers, connections)
--
cgit v1.2.3
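After this reordering the n input ports come first on every gate and on mainAffine, with the recurrent (outputDup) and cell (cellDup) ports moved to positions n + 1 and n + 2, so a single-input LSTM regains the parameter order of the old fixed wiring and previously saved parameter files still load. A standalone sketch (plain Lua, no nerv required; the dimensions are hypothetical) that rebuilds and prints the resulting input-gate wiring for two inputs:

-- Rebuild just the input-gate connections the way the patch now does.
local din = {256, 128}
local connections = {
    {'outputDup[1]', 'inputGate[' .. (#din + 1) .. ']', 1},  -- recurrent
    {'cellDup[1]', 'inputGate[' .. (#din + 2) .. ']', 1},    -- cell state
}
for i = 1, #din do
    table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. i .. ']', 0})
end
for _, c in ipairs(connections) do
    print(string.format('%s -> %s (delay %d)', c[1], c[2], c[3]))
end
-- Prints:
--   outputDup[1] -> inputGate[3] (delay 1)
--   cellDup[1] -> inputGate[4] (delay 1)
--   inputDup1[1] -> inputGate[1] (delay 0)
--   inputDup2[1] -> inputGate[2] (delay 0)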