local LSTMLayer = nerv.class('nerv.LSTMLayer', 'nerv.GraphLayer')
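
-- LSTMLayer composes one LSTM cell as a nerv.GraphLayer: the gates, cell
-- and output are built from primitive layers and wired together through
-- the connection list below, with delay-1 edges providing the recurrence
-- to the previous time step.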
function LSTMLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1) -- exactly one input port and one output port

    local din = layer_conf.dim_in[1]
    local dout = layer_conf.dim_out[1]

    -- reuse a caller-supplied ParamRepo so parameters can be shared with
    -- other layers; otherwise start from an empty one
    local pr = layer_conf.pr
    if pr == nil then
        pr = nerv.ParamRepo({}, self.loc_type)
    end
    local layers = {
        ['nerv.CombinerLayer'] = {
            -- sums the gated candidate and the gated old cell state into c_t
            mainCombine = {dim_in = {dout, dout}, dim_out = {dout}, lambda = {1, 1}},
        },
        ['nerv.DuplicateLayer'] = {
            -- fan the input, the hidden output and the cell state out to
            -- every consumer in the graph below
            inputDup = {dim_in = {din}, dim_out = {din, din, din, din}},
            outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
            cellDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
        },
        ['nerv.AffineLayer'] = {
            -- affine transform feeding the candidate cell input
            mainAffine = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
        },
        ['nerv.TanhLayer'] = {
            mainTanh = {dim_in = {dout}, dim_out = {dout}},
            outputTanh = {dim_in = {dout}, dim_out = {dout}},
        },
        ['nerv.LSTMGateLayer'] = {
            -- each gate sees {x_t, h_{t-1}, cell state}; param_type 'D'
            -- marks the diagonal (peephole) weight on the cell-state input
            forgetGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
            inputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
            outputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
        },
        ['nerv.ElemMulLayer'] = {
            -- element-wise products that apply the gates
            inputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
            forgetGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
            outputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
        },
    }
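
    -- The connections below realize the standard peephole LSTM recurrence
    -- ('.' denotes element-wise product; the weight/bias names here are
    -- illustrative, the actual parameters live in the layers above):
    --
    --   i_t = sigmoid(W_ix x_t + W_ih h_{t-1} + w_ic . c_{t-1} + b_i)
    --   f_t = sigmoid(W_fx x_t + W_fh h_{t-1} + w_fc . c_{t-1} + b_f)
    --   c_t = f_t . c_{t-1} + i_t . tanh(W_cx x_t + W_ch h_{t-1} + b_c)
    --   o_t = sigmoid(W_ox x_t + W_oh h_{t-1} + w_oc . c_t + b_o)
    --   h_t = o_t . tanh(c_t)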

    -- each connection is {source_port, sink_port, time_delay}; a delay of 1
    -- reads the value produced at the previous time step
    local connections = {
        -- lstm input
        {'<input>[1]', 'inputDup[1]', 0},

        -- input gate
        {'inputDup[1]', 'inputGate[1]', 0},
        {'outputDup[1]', 'inputGate[2]', 1},
        {'cellDup[1]', 'inputGate[3]', 1},

        -- forget gate
        {'inputDup[2]', 'forgetGate[1]', 0},
        {'outputDup[2]', 'forgetGate[2]', 1},
        {'cellDup[2]', 'forgetGate[3]', 1},

        -- lstm cell
        {'forgetGate[1]', 'forgetGateMul[1]', 0},
        {'cellDup[3]', 'forgetGateMul[2]', 1},
        {'inputDup[3]', 'mainAffine[1]', 0},
        {'outputDup[3]', 'mainAffine[2]', 1},
        {'mainAffine[1]', 'mainTanh[1]', 0},
        {'inputGate[1]', 'inputGateMul[1]', 0},
        {'mainTanh[1]', 'inputGateMul[2]', 0},
        {'inputGateMul[1]', 'mainCombine[1]', 0},
        {'forgetGateMul[1]', 'mainCombine[2]', 0},
        {'mainCombine[1]', 'cellDup[1]', 0},

        -- output gate (peeks at the current cell state, hence delay 0)
        {'inputDup[4]', 'outputGate[1]', 0},
        {'outputDup[4]', 'outputGate[2]', 1},
        {'cellDup[4]', 'outputGate[3]', 0},

        -- lstm output
        {'cellDup[5]', 'outputTanh[1]', 0},
        {'outputGate[1]', 'outputGateMul[1]', 0},
        {'outputTanh[1]', 'outputGateMul[2]', 0},
        {'outputGateMul[1]', 'outputDup[1]', 0},
        {'outputDup[5]', '<output>[1]', 0},
    }

    -- qualify the sub-layer names with this layer's id so instances do not
    -- clash, then instantiate the sub-layers and build the graph
    self:add_prefix(layers, connections)
    local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
    self:graph_init(layer_repo, connections)
end
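
-- A minimal construction sketch (the id, gconf and dimensions here are
-- illustrative, not taken from this file):
--
--     local lstm = nerv.LSTMLayer('lstm1', gconf,
--                                 {dim_in = {620}, dim_out = {1024}})
--
-- Passing a shared ParamRepo through layer_conf.pr ties this layer's
-- parameters to ones created elsewhere:
--
--     local lstm2 = nerv.LSTMLayer('lstm2', gconf,
--                                  {dim_in = {620}, dim_out = {1024}, pr = pr})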