-- LSTM layer assembled as a graph of primitive NERV layers (affine
-- transforms with sigmoid activations for the gates, tanh squashing for
-- cell input/output, element-wise products, duplicators and a combiner),
-- wired together through the nerv.GraphLayer machinery.
local LSTMLayer = nerv.class('nerv.LSTMLayer', 'nerv.GraphLayer')

--- Build the internal LSTM graph.
-- @param id string identifier of this layer instance
-- @param global_conf global configuration table shared by all layers
-- @param layer_conf layer configuration; fields used here:
--   dim_in  : list of external input dimensions (one or more inputs)
--   dim_out : {dout} -- single output dimension, also the cell size
--   pr      : optional nerv.ParamRepo supplying/sharing parameters;
--             an empty repo is created when absent
function LSTMLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    -- accept any number of inputs (-1 = unchecked), require exactly one output
    self:check_dim_len(-1, 1)
    if #self.dim_in == 0 then
        nerv.error('LSTM layer %s has no input', self.id)
    end
    local din = layer_conf.dim_in
    local dout = layer_conf.dim_out[1]
    local pr = layer_conf.pr
    if pr == nil then
        pr = nerv.ParamRepo({}, self.loc_type)
    end

    -- Sub-layers of the graph, grouped by layer class name.
    -- Each gate sees all external inputs, the recurrent hidden state and a
    -- peephole from the cell, hence dim_in = din .. {dout, dout}.
    -- NOTE(review): the 'D' param_type on the last (cell/peephole) input
    -- presumably selects a diagonal weight matrix -- confirm against
    -- nerv.AffineLayer before relying on it.
    local layers = {
        ['nerv.CombinerLayer'] = {
            -- new cell = input-gated candidate + forget-gated previous cell
            mainCombine = {dim_in = {dout, dout}, dim_out = {dout},
                           lambda = {1, 1}},
        },
        ['nerv.DuplicateLayer'] = {
            -- hidden state fans out to the three gates, the candidate
            -- affine, and the layer output
            outputDup = {dim_in = {dout},
                         dim_out = {dout, dout, dout, dout, dout}},
            -- cell state fans out to the gate peepholes, the forget-gate
            -- product, and the output tanh
            cellDup = {dim_in = {dout},
                       dim_out = {dout, dout, dout, dout, dout}},
        },
        ['nerv.AffineLayer'] = {
            -- candidate cell input: external inputs + recurrent hidden state
            mainAffine = {dim_in = table.connect(din, {dout}),
                          dim_out = {dout}, pr = pr},
            forgetGate = {dim_in = table.connect(din, {dout, dout}),
                          dim_out = {dout},
                          param_type = table.connect(table.vector(#din, 'N'),
                                                     {'N', 'D'}),
                          pr = pr, activation = nerv.SigmoidLayer},
            inputGate = {dim_in = table.connect(din, {dout, dout}),
                         dim_out = {dout},
                         param_type = table.connect(table.vector(#din, 'N'),
                                                    {'N', 'D'}),
                         pr = pr, activation = nerv.SigmoidLayer},
            outputGate = {dim_in = table.connect(din, {dout, dout}),
                          dim_out = {dout},
                          param_type = table.connect(table.vector(#din, 'N'),
                                                     {'N', 'D'}),
                          pr = pr, activation = nerv.SigmoidLayer},
        },
        ['nerv.TanhLayer'] = {
            mainTanh = {dim_in = {dout}, dim_out = {dout}},
            outputTanh = {dim_in = {dout}, dim_out = {dout}},
        },
        ['nerv.ElemMulLayer'] = {
            inputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
            forgetGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
            outputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
        },
    }
    -- One 4-way duplicator per external input: fan-out to the three gates
    -- and the candidate affine transform.
    for i = 1, #din do
        layers['nerv.DuplicateLayer']['inputDup' .. i] =
            {dim_in = {din[i]},
             dim_out = {din[i], din[i], din[i], din[i]}}
    end

    -- Graph edges as {from_port, to_port, delay}.
    -- NOTE(review): the third element appears to be a time delay in frames
    -- (1 = read the previous timestep's value, i.e. a recurrent/peephole
    -- edge; 0 = within-frame) -- confirm against nerv.GraphLayer.
    -- The commented-out '(1 .. n)' templates describe the per-input edges
    -- added programmatically in the loop below.
    local connections = {
        -- lstm input
        --{'[1 .. n]', 'inputDup(1 .. n)[1]', 0},

        -- input gate: previous hidden state + previous cell (peephole)
        {'outputDup[1]', 'inputGate[' .. (#din + 1) .. ']', 1},
        {'cellDup[1]', 'inputGate[' .. (#din + 2) .. ']', 1},
        --{'inputDup(1 .. n)[1]', 'inputGate[1 .. n]', 0},

        -- forget gate: previous hidden state + previous cell (peephole)
        {'outputDup[2]', 'forgetGate[' .. (#din + 1) .. ']', 1},
        {'cellDup[2]', 'forgetGate[' .. (#din + 2) .. ']', 1},
        --{'inputDup(1 .. n)[2]', 'forgetGate[1 .. n]', 0},

        -- lstm cell: c_t = i_t * tanh(affine) + f_t * c_{t-1}
        {'forgetGate[1]', 'forgetGateMul[1]', 0},
        {'cellDup[3]', 'forgetGateMul[2]', 1},
        {'outputDup[3]', 'mainAffine[' .. (#din + 1) .. ']', 1},
        --{'inputDup(1 .. n)[3]', 'mainAffine[1 .. n]', 0},
        {'mainAffine[1]', 'mainTanh[1]', 0},
        {'inputGate[1]', 'inputGateMul[1]', 0},
        {'mainTanh[1]', 'inputGateMul[2]', 0},
        {'inputGateMul[1]', 'mainCombine[1]', 0},
        {'forgetGateMul[1]', 'mainCombine[2]', 0},
        {'mainCombine[1]', 'cellDup[1]', 0},

        -- output gate: previous hidden state + CURRENT cell (delay 0
        -- peephole, unlike the input/forget gates above)
        {'outputDup[4]', 'outputGate[' .. (#din + 1) .. ']', 1},
        {'cellDup[4]', 'outputGate[' .. (#din + 2) .. ']', 0},
        --{'inputDup(1 .. n)[4]', 'outputGate[1 .. n]', 0},

        -- lstm output: h_t = o_t * tanh(c_t)
        {'cellDup[5]', 'outputTanh[1]', 0},
        {'outputGate[1]', 'outputGateMul[1]', 0},
        {'outputTanh[1]', 'outputGateMul[2]', 0},
        {'outputGateMul[1]', 'outputDup[1]', 0},
        {'outputDup[5]', '[1]', 0},
    }
    -- Wire each external input i through its duplicator into the three
    -- gates and the candidate affine (ports 1..#din of each).
    for i = 1, #din do
        table.insert(connections,
            {'[' .. i .. ']', 'inputDup' .. i .. '[1]', 0})
        table.insert(connections,
            {'inputDup' .. i .. '[1]', 'inputGate[' .. i .. ']', 0})
        table.insert(connections,
            {'inputDup' .. i .. '[2]', 'forgetGate[' .. i .. ']', 0})
        table.insert(connections,
            {'inputDup' .. i .. '[3]', 'mainAffine[' .. i .. ']', 0})
        table.insert(connections,
            {'inputDup' .. i .. '[4]', 'outputGate[' .. i .. ']', 0})
    end

    self:add_prefix(layers, connections)
    local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
    -- expose the internal layer repo (presumably for external parameter
    -- access -- verify against callers)
    self.lrepo = layer_repo
    self:graph_init(layer_repo, connections)
end