Diffstat (limited to 'nerv/layer/lstmp.lua')
-rw-r--r-- | nerv/layer/lstmp.lua | 91
1 file changed, 91 insertions, 0 deletions
diff --git a/nerv/layer/lstmp.lua b/nerv/layer/lstmp.lua
new file mode 100644
index 0000000..bbb2091
--- /dev/null
+++ b/nerv/layer/lstmp.lua
@@ -0,0 +1,91 @@
+local LSTMPLayer = nerv.class('nerv.LSTMPLayer', 'nerv.GraphLayer')
+
+function LSTMPLayer:__init(id, global_conf, layer_conf)
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
+    self:check_dim_len(1, 1)
+
+    local din = layer_conf.dim_in[1]
+    local dcell = layer_conf.cell_dim
+    local dout = layer_conf.dim_out[1]
+
+    local pr = layer_conf.pr
+    if pr == nil then
+        pr = nerv.ParamRepo({}, self.loc_type)
+    end
+
+    local layers = {
+        ['nerv.CombinerLayer'] = {
+            mainCombine = {dim_in = {dcell, dcell}, dim_out = {dcell}, lambda = {1, 1}},
+        },
+        ['nerv.DuplicateLayer'] = {
+            inputDup = {dim_in = {din}, dim_out = {din, din, din, din}},
+            outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
+            cellDup = {dim_in = {dcell}, dim_out = {dcell, dcell, dcell, dcell, dcell}},
+        },
+        ['nerv.AffineLayer'] = {
+            mainAffine = {dim_in = {din, dout}, dim_out = {dcell}, pr = pr},
+        },
+        ['nerv.TanhLayer'] = {
+            mainTanh = {dim_in = {dcell}, dim_out = {dcell}},
+            outputTanh = {dim_in = {dcell}, dim_out = {dcell}},
+        },
+        ['nerv.LSTMGateLayer'] = {
+            forgetGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr},
+            inputGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr},
+            outputGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr},
+        },
+        ['nerv.ElemMulLayer'] = {
+            inputGateMul = {dim_in = {dcell, dcell}, dim_out = {dcell}},
+            forgetGateMul = {dim_in = {dcell, dcell}, dim_out = {dcell}},
+            outputGateMul = {dim_in = {dcell, dcell}, dim_out = {dcell}},
+        },
+        ['nerv.ProjectionLayer'] = {
+            projection = {dim_in = {dcell}, dim_out = {dout}, pr = pr},
+        },
+    }
+
+    local connections = {
+        -- lstm input
+        {'<input>[1]', 'inputDup[1]', 0},
+
+        -- input gate
+        {'inputDup[1]', 'inputGate[1]', 0},
+        {'outputDup[1]', 'inputGate[2]', 1},
+        {'cellDup[1]', 'inputGate[3]', 1},
+
+        -- forget gate
+        {'inputDup[2]', 'forgetGate[1]', 0},
+        {'outputDup[2]', 'forgetGate[2]', 1},
+        {'cellDup[2]', 'forgetGate[3]', 1},
+
+        -- lstm cell
+        {'forgetGate[1]', 'forgetGateMul[1]', 0},
+        {'cellDup[3]', 'forgetGateMul[2]', 1},
+        {'inputDup[3]', 'mainAffine[1]', 0},
+        {'outputDup[3]', 'mainAffine[2]', 1},
+        {'mainAffine[1]', 'mainTanh[1]', 0},
+        {'inputGate[1]', 'inputGateMul[1]', 0},
+        {'mainTanh[1]', 'inputGateMul[2]', 0},
+        {'inputGateMul[1]', 'mainCombine[1]', 0},
+        {'forgetGateMul[1]', 'mainCombine[2]', 0},
+        {'mainCombine[1]', 'cellDup[1]', 0},
+
+        -- output gate
+        {'inputDup[4]', 'outputGate[1]', 0},
+        {'outputDup[4]', 'outputGate[2]', 1},
+        {'cellDup[4]', 'outputGate[3]', 0},
+
+        -- lstm output
+        {'cellDup[5]', 'outputTanh[1]', 0},
+        {'outputGate[1]', 'outputGateMul[1]', 0},
+        {'outputTanh[1]', 'outputGateMul[2]', 0},
+        {'outputGateMul[1]', 'projection[1]', 0},
+        {'projection[1]', 'outputDup[1]', 0},
+        {'outputDup[5]', '<output>[1]', 0},
+    }
+
+    self:add_prefix(layers, connections)
+    local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
+    self.lrepo = layer_repo
+    self:graph_init(layer_repo, connections)
+end
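
For reference, the connection list above wires up the standard LSTM-with-projection (LSTMP) recurrence: the third field of each connection is the time delay, so the delay-1 edges from outputDup and cellDup feed r_{t-1} and c_{t-1} back into the gates, while the delay-0 edge cellDup[4] -> outputGate[3] gives the output gate the current cell state. The 'D' entries in param_type are the diagonal peephole weights. Written out (notation is ours, not from the source; whether nerv.ProjectionLayer adds a bias is not confirmed here, so the projection is written without one):

    \begin{aligned}
    i_t &= \sigma(W_{ix} x_t + W_{ir} r_{t-1} + w_{ic} \odot c_{t-1} + b_i) \\
    f_t &= \sigma(W_{fx} x_t + W_{fr} r_{t-1} + w_{fc} \odot c_{t-1} + b_f) \\
    c_t &= f_t \odot c_{t-1} + i_t \odot \tanh(W_{cx} x_t + W_{cr} r_{t-1} + b_c) \\
    o_t &= \sigma(W_{ox} x_t + W_{or} r_{t-1} + w_{oc} \odot c_t + b_o) \\
    r_t &= W_{rm}\,(o_t \odot \tanh(c_t))
    \end{aligned}

where x_t is the din-dimensional input, c_t the dcell-dimensional cell state, and r_t the dout-dimensional projected output that is both recurred and emitted.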
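
A minimal usage sketch, with hypothetical id and dimensions; gconf stands for the usual nerv global_conf table. Since the constructor allocates an empty nerv.ParamRepo when layer_conf.pr is nil, pr can simply be omitted:

    -- Hypothetical setup: 620-dim input, 1024-dim cell,
    -- 512-dim recurrent projection.
    local lstmp = nerv.LSTMPLayer('lstmp1', gconf, {
        dim_in = {620},     -- din: input dimension
        dim_out = {512},    -- dout: projection (recurrent) dimension
        cell_dim = 1024,    -- dcell: cell dimension
    })

Because dout (the projection size) is typically much smaller than dcell, the recurrent weight matrices are dcell x dout rather than dcell x dcell, which is the parameter saving that motivates the projection layer.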