author     Qi Liu <[email protected]>    2016-05-12 11:49:44 +0800
committer  Qi Liu <[email protected]>    2016-05-12 11:49:44 +0800
commit     10916f721a945a5edd052ab93027413fd3c01f65 (patch)
tree       cd90688b8aee2991a78f971e1bc4e0e9596e357b
parent     313ab19b004603f10c282c5ae035e36e3437ea1f (diff)
lstm & lstmp layers support multiple inputs
-rw-r--r--  nerv/layer/lstm.lua   53
-rw-r--r--  nerv/layer/lstmp.lua  55
-rw-r--r--  nerv/layer/rnn.lua    15
3 files changed, 72 insertions, 51 deletions
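
The patch leans on two small helpers from NERV's Lua table utilities, table.connect and table.vector. Their real definitions live elsewhere in the tree; the sketch below only captures the semantics this commit assumes (concatenate two arrays; build an n-element array filled with one value), inferred from usage rather than copied from the library.

    -- Sketch of the assumed helper semantics, not NERV's actual code.
    -- table.connect(a, b): a new array holding the elements of a followed by b.
    function table.connect(a, b)
        local r = {}
        for i = 1, #a do r[#r + 1] = a[i] end
        for i = 1, #b do r[#r + 1] = b[i] end
        return r
    end

    -- table.vector(n, v): an array of n copies of v.
    function table.vector(n, v)
        local r = {}
        for i = 1, n do r[i] = v end
        return r
    end

    -- With illustrative widths din = {429, 100} and dout = 300, the gate
    -- configurations in this patch evaluate to:
    -- table.connect({dout, dout}, din)                   --> {300, 300, 429, 100}
    -- table.connect({'N', 'D'}, table.vector(#din, 'N')) --> {'N', 'D', 'N', 'N'}
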
diff --git a/nerv/layer/lstm.lua b/nerv/layer/lstm.lua
index e568ee8..5d73ad2 100644
--- a/nerv/layer/lstm.lua
+++ b/nerv/layer/lstm.lua
@@ -2,9 +2,12 @@ local LSTMLayer = nerv.class('nerv.LSTMLayer', 'nerv.GraphLayer')
function LSTMLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
- self:check_dim_len(1, 1)
+ self:check_dim_len(-1, 1)
+ if #self.dim_in == 0 then
+ nerv.error('LSTM layer %s has no input', self.id)
+ end
- local din = layer_conf.dim_in[1]
+ local din = layer_conf.dim_in
local dout = layer_conf.dim_out[1]
local pr = layer_conf.pr
@@ -17,15 +20,17 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
mainCombine = {dim_in = {dout, dout}, dim_out = {dout}, lambda = {1, 1}},
},
['nerv.DuplicateLayer'] = {
- inputDup = {dim_in = {din}, dim_out = {din, din, din, din}},
outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
cellDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
},
['nerv.AffineLayer'] = {
- mainAffine = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
- forgetGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
- inputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
- outputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
+ mainAffine = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
+ forgetGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
+ param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+ inputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
+ param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+ outputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
+ param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
},
['nerv.TanhLayer'] = {
mainTanh = {dim_in = {dout}, dim_out = {dout}},
@@ -37,26 +42,29 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
outputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
},
}
+ for i = 1, #din do
+ layers['nerv.DuplicateLayer']['inputDup' .. i] = {dim_in = {din[i]}, dim_out = {din[i], din[i], din[i], din[i]}}
+ end
local connections = {
-- lstm input
- {'<input>[1]', 'inputDup[1]', 0},
+ --{'<input>[1 .. n]', 'inputDup(1 .. n)[1]', 0},
-- input gate
- {'inputDup[1]', 'inputGate[1]', 0},
- {'outputDup[1]', 'inputGate[2]', 1},
- {'cellDup[1]', 'inputGate[3]', 1},
+ {'outputDup[1]', 'inputGate[1]', 1},
+ {'cellDup[1]', 'inputGate[2]', 1},
+ --{'inputDup(1 .. n)[1]', 'inputGate[3 .. n + 2]', 0},
-- forget gate
- {'inputDup[2]', 'forgetGate[1]', 0},
- {'outputDup[2]', 'forgetGate[2]', 1},
- {'cellDup[2]', 'forgetGate[3]', 1},
+ {'outputDup[2]', 'forgetGate[1]', 1},
+ {'cellDup[2]', 'forgetGate[2]', 1},
+ --{'inputDup(1 .. n)[2]', 'forgetGate[3 .. n + 2]', 0},
-- lstm cell
{'forgetGate[1]', 'forgetGateMul[1]', 0},
{'cellDup[3]', 'forgetGateMul[2]', 1},
- {'inputDup[3]', 'mainAffine[1]', 0},
- {'outputDup[3]', 'mainAffine[2]', 1},
+ {'outputDup[3]', 'mainAffine[1]', 1},
+ --{'inputDup(1 .. n)[3]', 'mainAffine[2 .. n + 1]', 0},
{'mainAffine[1]', 'mainTanh[1]', 0},
{'inputGate[1]', 'inputGateMul[1]', 0},
{'mainTanh[1]', 'inputGateMul[2]', 0},
@@ -65,9 +73,9 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
{'mainCombine[1]', 'cellDup[1]', 0},
-- output gate
- {'inputDup[4]', 'outputGate[1]', 0},
- {'outputDup[4]', 'outputGate[2]', 1},
- {'cellDup[4]', 'outputGate[3]', 0},
+ {'outputDup[4]', 'outputGate[1]', 1},
+ {'cellDup[4]', 'outputGate[2]', 0},
+ --{'inputDup(1 .. n)[4]', 'outputGate[3 .. n + 2]', 0},
-- lstm output
{'cellDup[5]', 'outputTanh[1]', 0},
@@ -76,6 +84,13 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
{'outputGateMul[1]', 'outputDup[1]', 0},
{'outputDup[5]', '<output>[1]', 0},
}
+ for i = 1, #din do
+ table.insert(connections, {'<input>[' .. i .. ']', 'inputDup' .. i .. '[1]', 0})
+ table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. (i + 2) .. ']', 0})
+ table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. (i + 2) .. ']', 0})
+ table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. (i + 1) .. ']', 0})
+ table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. (i + 2) .. ']', 0})
+ end
self:add_prefix(layers, connections)
local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
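
After the loop, each input stream i has its own inputDup<i> feeding slot i + 2 of the three gates and slot i + 1 of mainAffine, while the recurrent (outputDup) and peephole (cellDup) connections keep the leading slots. A hypothetical instantiation follows, assuming a pre-built global config and parameter repo (gconf and param_repo here are illustrative names, as are the dimensions); the constructor signature and the dim_in/dim_out/pr fields are the ones this diff itself uses.

    -- Two input streams, 429- and 100-dim, into a 300-dim LSTM (sketch).
    local lstm = nerv.LSTMLayer('lstm1', gconf,
        {dim_in = {429, 100}, dim_out = {300}, pr = param_repo})
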
diff --git a/nerv/layer/lstmp.lua b/nerv/layer/lstmp.lua
index dc30797..49c9516 100644
--- a/nerv/layer/lstmp.lua
+++ b/nerv/layer/lstmp.lua
@@ -2,9 +2,12 @@ local LSTMPLayer = nerv.class('nerv.LSTMPLayer', 'nerv.GraphLayer')
function LSTMPLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
- self:check_dim_len(1, 1)
+ self:check_dim_len(-1, 1)
+ if #self.dim_in == 0 then
+ nerv.error('LSTMP layer %s has no input', self.id)
+ end
- local din = layer_conf.dim_in[1]
+ local din = layer_conf.dim_in
local dcell = layer_conf.cell_dim
local dout = layer_conf.dim_out[1]
@@ -18,15 +21,17 @@ function LSTMPLayer:__init(id, global_conf, layer_conf)
mainCombine = {dim_in = {dcell, dcell}, dim_out = {dcell}, lambda = {1, 1}},
},
['nerv.DuplicateLayer'] = {
- inputDup = {dim_in = {din}, dim_out = {din, din, din, din}},
outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
cellDup = {dim_in = {dcell}, dim_out = {dcell, dcell, dcell, dcell, dcell}},
},
['nerv.AffineLayer'] = {
- mainAffine = {dim_in = {din, dout}, dim_out = {dcell}, pr = pr},
- forgetGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
- inputGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
- outputGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr, activation = nerv.SigmoidLayer},
+ mainAffine = {dim_in = table.connect({dout}, din), dim_out = {dcell}, pr = pr},
+ forgetGate = {dim_in = table.connect({dout, dcell}, din), dim_out = {dcell},
+ param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+ inputGate = {dim_in = table.connect({dout, dcell}, din), dim_out = {dcell},
+ param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
+ outputGate = {dim_in = table.connect({dout, dcell}, din), dim_out = {dcell},
+ param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
projection = {dim_in = {dcell}, dim_out = {dout}, pr = pr, no_bias = true},
},
['nerv.TanhLayer'] = {
@@ -39,26 +44,29 @@ function LSTMPLayer:__init(id, global_conf, layer_conf)
outputGateMul = {dim_in = {dcell, dcell}, dim_out = {dcell}},
},
}
-
+ for i = 1, #din do
+ layers['nerv.DuplicateLayer']['inputDup' .. i] = {dim_in = {din[i]}, dim_out = {din[i], din[i], din[i], din[i]}}
+ end
+
local connections = {
-- lstm input
- {'<input>[1]', 'inputDup[1]', 0},
+ --{'<input>[1 .. n]', 'inputDup(1 .. n)[1]', 0},
-- input gate
- {'inputDup[1]', 'inputGate[1]', 0},
- {'outputDup[1]', 'inputGate[2]', 1},
- {'cellDup[1]', 'inputGate[3]', 1},
+ {'outputDup[1]', 'inputGate[1]', 1},
+ {'cellDup[1]', 'inputGate[2]', 1},
+ --{'inputDup(1 .. n)[1]', 'inputGate[3 .. n + 2]', 0},
-- forget gate
- {'inputDup[2]', 'forgetGate[1]', 0},
- {'outputDup[2]', 'forgetGate[2]', 1},
- {'cellDup[2]', 'forgetGate[3]', 1},
+ {'outputDup[2]', 'forgetGate[1]', 1},
+ {'cellDup[2]', 'forgetGate[2]', 1},
+ --{'inputDup(1 .. n)[2]', 'forgetGate[3 .. n + 2]', 0},
-- lstm cell
{'forgetGate[1]', 'forgetGateMul[1]', 0},
{'cellDup[3]', 'forgetGateMul[2]', 1},
- {'inputDup[3]', 'mainAffine[1]', 0},
- {'outputDup[3]', 'mainAffine[2]', 1},
+ {'outputDup[3]', 'mainAffine[1]', 1},
+ --{'inputDup(1 .. n)[3]', 'mainAffine[2 .. n + 1]', 0},
{'mainAffine[1]', 'mainTanh[1]', 0},
{'inputGate[1]', 'inputGateMul[1]', 0},
{'mainTanh[1]', 'inputGateMul[2]', 0},
@@ -67,9 +75,9 @@ function LSTMPLayer:__init(id, global_conf, layer_conf)
{'mainCombine[1]', 'cellDup[1]', 0},
-- output gate
- {'inputDup[4]', 'outputGate[1]', 0},
- {'outputDup[4]', 'outputGate[2]', 1},
- {'cellDup[4]', 'outputGate[3]', 0},
+ {'outputDup[4]', 'outputGate[1]', 1},
+ {'cellDup[4]', 'outputGate[2]', 0},
+ --{'inputDup(1 .. n)[4]', 'outputGate[3 .. n + 2]', 0},
-- lstm output
{'cellDup[5]', 'outputTanh[1]', 0},
@@ -79,6 +87,13 @@ function LSTMPLayer:__init(id, global_conf, layer_conf)
{'projection[1]', 'outputDup[1]', 0},
{'outputDup[5]', '<output>[1]', 0},
}
+ for i = 1, #din do
+ table.insert(connections, {'<input>[' .. i .. ']', 'inputDup' .. i .. '[1]', 0})
+ table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. (i + 2) .. ']', 0})
+ table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. (i + 2) .. ']', 0})
+ table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. (i + 1) .. ']', 0})
+ table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. (i + 2) .. ']', 0})
+ end
self:add_prefix(layers, connections)
local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
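
The LSTMP changes mirror lstm.lua exactly; the only differences are that the cell runs at cell_dim and a bias-free projection layer maps it back down to dim_out. A hypothetical instantiation under the same assumptions as the LSTM sketch above:

    -- Two input streams into a 512-dim cell projected down to 128 dims (sketch).
    local lstmp = nerv.LSTMPLayer('lstmp1', gconf,
        {dim_in = {429, 100}, dim_out = {128}, cell_dim = 512, pr = param_repo})
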
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index fd6e753..63e0b55 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -4,12 +4,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
self:check_dim_len(-1, 1)
if #self.dim_in == 0 then
- nerv.error('RNN Layer %s has no input', self.id)
- end
-
- self.activation = layer_conf.activation
- if self.activation == nil then
- self.activation = 'nerv.SigmoidLayer'
+ nerv.error('RNN layer %s has no input', self.id)
end
local din = layer_conf.dim_in
@@ -22,10 +17,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
local layers = {
['nerv.AffineLayer'] = {
- main = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
- },
- [self.activation] = {
- activation = {dim_in = {dout}, dim_out = {dout}},
+ main = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr, activation = layer_conf.activation},
},
['nerv.DuplicateLayer'] = {
duplicate = {dim_in = {dout}, dim_out = {dout, dout}},
@@ -33,8 +25,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
}
local connections = {
- {'main[1]', 'activation[1]', 0},
- {'activation[1]', 'duplicate[1]', 0},
+ {'main[1]', 'duplicate[1]', 0},
{'duplicate[1]', 'main[1]', 1},
{'duplicate[2]', '<output>[1]', 0},
}
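
With the separate activation layer gone, layer_conf.activation is forwarded straight into the main AffineLayer, the same way the LSTM gates above pass activation = nerv.SigmoidLayer. A hypothetical instantiation, assuming activation now takes a layer class as it does for the gates (names and dimensions are illustrative):

    -- Single-input recurrent layer with the activation fused into the affine (sketch).
    local rnn = nerv.RNNLayer('rnn1', gconf,
        {dim_in = {429}, dim_out = {300}, pr = param_repo, activation = nerv.SigmoidLayer})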