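-- nerv/layer/lstm.lua
-- An LSTM layer built as a nerv.GraphLayer: the recurrence is expressed
-- as a graph of primitive layers (affine, tanh, element-wise multiply,
-- gate, duplicate and combiner layers) rather than a single fused kernel.
-- Reading off the connections below, the graph implements the peephole
-- LSTM recurrences:
--   i_t = sigmoid(W_i x_t + U_i h_{t-1} + v_i .* c_{t-1})
--   f_t = sigmoid(W_f x_t + U_f h_{t-1} + v_f .* c_{t-1})
--   c_t = f_t .* c_{t-1} + i_t .* tanh(W_c x_t + U_c h_{t-1} + b)
--   o_t = sigmoid(W_o x_t + U_o h_{t-1} + v_o .* c_t)
--   h_t = o_t .* tanh(c_t)
-- where the v_* peephole weights are the diagonal ('D') gate parameters.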
local LSTMLayer = nerv.class('nerv.LSTMLayer', 'nerv.GraphLayer')

function LSTMLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1)

    local din = layer_conf.dim_in[1]
    local dout = layer_conf.dim_out[1]

    local pr = layer_conf.pr
    if pr == nil then
        pr = nerv.ParamRepo({}, self.loc_type)
    end
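
    -- internal layers of the graph: the DuplicateLayers fan the input x_t,
    -- the hidden output h_t and the cell c_t out to every consumer; the
    -- three LSTMGateLayers are the sigmoid input/forget/output gates, whose
    -- param_type 'D' entry marks a diagonal (peephole) weight on the cell
    -- input, while 'N' entries are ordinary dense weights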

    local layers = {
        ['nerv.CombinerLayer'] = {
            mainCombine = {dim_in = {dout, dout}, dim_out = {dout}, lambda = {1, 1}},
        },
        ['nerv.DuplicateLayer'] = {
            inputDup = {dim_in = {din}, dim_out = {din, din, din, din}},
            outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
            cellDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
        },
        ['nerv.AffineLayer'] = {
            mainAffine = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
        },
        ['nerv.TanhLayer'] = {
            mainTanh = {dim_in = {dout}, dim_out = {dout}},
            outputTanh = {dim_in = {dout}, dim_out = {dout}},
        },
        ['nerv.LSTMGateLayer'] = {
            forgetGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
            inputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
            outputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, param_type = {'N', 'N', 'D'}, pr = pr},
        },
        ['nerv.ElemMulLayer'] = {
            inputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
            forgetGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
            outputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
        },
    }
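
    -- graph wiring: each entry is {from_port, to_port, time_shift}; a shift
    -- of 0 is a same-frame link, while a shift of 1 reads the previous
    -- frame's value, forming the recurrent h_{t-1} and c_{t-1} loops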
    
    local connections = {
        -- lstm input
        {'<input>[1]', 'inputDup[1]', 0},

        -- input gate
        {'inputDup[1]', 'inputGate[1]', 0},
        {'outputDup[1]', 'inputGate[2]', 1},
        {'cellDup[1]', 'inputGate[3]', 1},

        -- forget gate
        {'inputDup[2]', 'forgetGate[1]', 0},
        {'outputDup[2]', 'forgetGate[2]', 1},
        {'cellDup[2]', 'forgetGate[3]', 1},

        -- lstm cell: c_t = f_t .* c_{t-1} + i_t .* tanh(affine(x_t, h_{t-1}))
        {'forgetGate[1]', 'forgetGateMul[1]', 0},
        {'cellDup[3]', 'forgetGateMul[2]', 1},
        {'inputDup[3]', 'mainAffine[1]', 0},
        {'outputDup[3]', 'mainAffine[2]', 1},
        {'mainAffine[1]', 'mainTanh[1]', 0},
        {'inputGate[1]', 'inputGateMul[1]', 0},
        {'mainTanh[1]', 'inputGateMul[2]', 0},
        {'inputGateMul[1]', 'mainCombine[1]', 0},
        {'forgetGateMul[1]', 'mainCombine[2]', 0},
        {'mainCombine[1]', 'cellDup[1]', 0},

        -- output gate (peephole reads the current cell c_t, hence shift 0)
        {'inputDup[4]', 'outputGate[1]', 0},
        {'outputDup[4]', 'outputGate[2]', 1},
        {'cellDup[4]', 'outputGate[3]', 0},

        -- lstm output: h_t = o_t .* tanh(c_t)
        {'cellDup[5]', 'outputTanh[1]', 0},
        {'outputGate[1]', 'outputGateMul[1]', 0},
        {'outputTanh[1]', 'outputGateMul[2]', 0},
        {'outputGateMul[1]', 'outputDup[1]', 0},
        {'outputDup[5]', '<output>[1]', 0},
    }

    self:add_prefix(layers, connections)
    local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
    self.lrepo = layer_repo
    self:graph_init(layer_repo, connections)
end
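
-- Minimal construction sketch (illustrative; not part of the original
-- file). 'gconf' stands for the global configuration table NERV passes
-- around; the id and dimensions are placeholder values:
--
--     local lstm = nerv.LSTMLayer('lstm1', gconf,
--                                 {dim_in = {429}, dim_out = {1024}})
--
-- dim_in[1] carries the per-frame input x_t and dim_out[1] the hidden
-- output h_t; the cell state and the one-frame recurrent links live
-- inside the graph, so only one input and one output port are exposed.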