1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
|
-- LSTM layer with peephole-style taps from the cell state into the gates,
-- assembled as a graph of primitive nerv layers on top of nerv.GraphLayer.
local LSTMLayer = nerv.class('nerv.LSTMLayer', 'nerv.GraphLayer')
--- Construct an LSTM graph layer.
-- @param id          layer identifier string
-- @param global_conf global configuration table, forwarded to all sub-layers
-- @param layer_conf  layer configuration; reads:
--   dim_in  - one or more input dimensions (the LSTM accepts n inputs)
--   dim_out - exactly one output dimension
--   pr      - optional shared nerv.ParamRepo for parameter tying across layers
function LSTMLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
-- NOTE(review): -1 presumably means "any number of inputs" in check_dim_len,
-- while exactly one output is required -- confirm against nerv.Layer.
self:check_dim_len(-1, 1)
if #self.dim_in == 0 then
nerv.error('LSTM layer %s has no input', self.id)
end
local din = layer_conf.dim_in
local dout = layer_conf.dim_out[1]
-- Reuse a caller-supplied parameter repo if present; otherwise start empty
-- so this layer's parameters are created fresh.
local pr = layer_conf.pr
if pr == nil then
pr = nerv.ParamRepo({}, self.loc_type)
end
-- Sub-layer declarations, keyed by nerv layer class name.
local layers = {
['nerv.CombinerLayer'] = {
-- Element-wise sum (lambda = {1, 1}) of the input-gated candidate and the
-- forget-gated previous cell: the new cell state.
mainCombine = {dim_in = {dout, dout}, dim_out = {dout}, lambda = {1, 1}},
},
['nerv.DuplicateLayer'] = {
-- Fan the hidden output h and the cell state c out to their 5 consumers.
outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
cellDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
},
['nerv.AffineLayer'] = {
-- Candidate transform: takes h (recurrent) plus all n external inputs.
mainAffine = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
-- Each gate takes h (recurrent), c (peephole) and the n external inputs.
-- NOTE(review): param_type 'D' on the second (cell) input presumably selects
-- a diagonal weight matrix for the peephole tap -- confirm against
-- nerv.AffineLayer; 'N' is used for the ordinary dense inputs.
forgetGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
inputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
outputGate = {dim_in = table.connect({dout, dout}, din), dim_out = {dout},
param_type = table.connect({'N', 'D'}, table.vector(#din, 'N')), pr = pr, activation = nerv.SigmoidLayer},
},
['nerv.TanhLayer'] = {
mainTanh = {dim_in = {dout}, dim_out = {dout}},   -- squashes the candidate
outputTanh = {dim_in = {dout}, dim_out = {dout}}, -- squashes the new cell
},
['nerv.ElemMulLayer'] = {
inputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
forgetGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
outputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
},
}
-- One duplicator per external input: each input feeds 4 consumers
-- (input gate, forget gate, candidate affine, output gate).
for i = 1, #din do
layers['nerv.DuplicateLayer']['inputDup' .. i] = {dim_in = {din[i]}, dim_out = {din[i], din[i], din[i], din[i]}}
end
-- Graph wiring: {source_port, dest_port, delay}.
-- NOTE(review): delay 1 presumably taps the value from the previous time
-- step (the recurrent h_{t-1} / c_{t-1} connections), delay 0 is a
-- within-step connection -- confirm against nerv.GraphLayer.
-- The commented templates below describe the per-input connections that the
-- loop after this table inserts for each of the n external inputs.
local connections = {
-- lstm input
--{'<input>[1 .. n]', 'inputDup(1 .. n)[1]', 0},
-- input gate
{'outputDup[1]', 'inputGate[1]', 1},
{'cellDup[1]', 'inputGate[2]', 1},
--{'inputDup(1 .. n)[1]', 'inputGate[3 .. n + 2]', 0},
-- forget gate
{'outputDup[2]', 'forgetGate[1]', 1},
{'cellDup[2]', 'forgetGate[2]', 1},
--{'inputDup(1 .. n)[2]', 'forgetGate[3 .. n + 2]', 0},
-- lstm cell
{'forgetGate[1]', 'forgetGateMul[1]', 0},
{'cellDup[3]', 'forgetGateMul[2]', 1},
{'outputDup[3]', 'mainAffine[1]', 1},
--{'inputDup(1 .. n)[3]', 'mainAffine[2 .. n + 1]', 0},
{'mainAffine[1]', 'mainTanh[1]', 0},
{'inputGate[1]', 'inputGateMul[1]', 0},
{'mainTanh[1]', 'inputGateMul[2]', 0},
{'inputGateMul[1]', 'mainCombine[1]', 0},
{'forgetGateMul[1]', 'mainCombine[2]', 0},
{'mainCombine[1]', 'cellDup[1]', 0},
-- output gate (peephole reads the CURRENT cell, hence delay 0 on cellDup[4])
{'outputDup[4]', 'outputGate[1]', 1},
{'cellDup[4]', 'outputGate[2]', 0},
--{'inputDup(1 .. n)[4]', 'outputGate[3 .. n + 2]', 0},
-- lstm output
{'cellDup[5]', 'outputTanh[1]', 0},
{'outputGate[1]', 'outputGateMul[1]', 0},
{'outputTanh[1]', 'outputGateMul[2]', 0},
{'outputGateMul[1]', 'outputDup[1]', 0},
{'outputDup[5]', '<output>[1]', 0},
}
-- Per-input wiring: route input i into its duplicator, then into the two
-- gates' ports (i + 2), the candidate affine's port (i + 1), and the output
-- gate's port (i + 2).
for i = 1, #din do
table.insert(connections, {'<input>[' .. i .. ']', 'inputDup' .. i .. '[1]', 0})
table.insert(connections, {'inputDup' .. i .. '[1]', 'inputGate[' .. (i + 2) .. ']', 0})
table.insert(connections, {'inputDup' .. i .. '[2]', 'forgetGate[' .. (i + 2) .. ']', 0})
table.insert(connections, {'inputDup' .. i .. '[3]', 'mainAffine[' .. (i + 1) .. ']', 0})
table.insert(connections, {'inputDup' .. i .. '[4]', 'outputGate[' .. (i + 2) .. ']', 0})
end
-- Namespace the sub-layer ids with this layer's id, then build and register
-- the graph.
self:add_prefix(layers, connections)
local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
self.lrepo = layer_repo
self:graph_init(layer_repo, connections)
end
|