-- The following methods must be implemented for a parameter or layer class
-- to work properly.
local Param = nerv.class('nerv.Param')

function Param:__init(id, global_conf)
    self.id = id
    self.gconf = global_conf
end

function Param:get_info()
    return self.info
end

function Param:set_info(info)
    self.info = info
end

function Param:read(handle)
    nerv.error_method_not_implemented()
end

function Param:write(handle)
    nerv.error_method_not_implemented()
end

function Param:update(gradient)
    nerv.error_method_not_implemented()
end
local Layer = nerv.class('nerv.Layer')

function Layer:__init(id, global_conf, layer_conf)
    nerv.error_method_not_implemented()
end

function Layer:init(batch_size)
    nerv.error_method_not_implemented()
end

function Layer:update(bp_err, input, output)
    nerv.error_method_not_implemented()
end

function Layer:propagate(input, output)
    nerv.error_method_not_implemented()
end

function Layer:back_propagate(bp_err, next_bp_err, input, output)
    nerv.error_method_not_implemented()
end
-- Check that the numbers of input and output ports declared in
-- `dim_in`/`dim_out` match what the layer expects; a non-positive expected
-- length disables the corresponding check.
function Layer:check_dim_len(len_in, len_out)
    local expected_in = #self.dim_in
    local expected_out = #self.dim_out
    if len_in > 0 and expected_in ~= len_in then
        nerv.error("layer %s expects %d inputs, %d given",
                    self.id, len_in, expected_in)
    end
    if len_out > 0 and expected_out ~= len_out then
        nerv.error("layer %s expects %d outputs, %d given",
                    self.id, len_out, expected_out)
    end
end
function Layer:get_params()
    nerv.error_method_not_implemented()
end

function Layer:get_dim()
    return self.dim_in, self.dim_out
end

function Layer:set_attr(name, value)
    self[name] = value
end

function Layer:get_sublayer(id)
    nerv.error('primitive layer does not have sublayers')
end
-- Look up (or create) a parameter for this layer. Candidate ids in `pid_list`
-- are tried first against `layer_conf` directly, then against the parameter
-- repo `layer_conf.pr` under the globally unique id "<layer_id>_<param_id>";
-- if nothing is found, a new parameter is auto-generated.
function Layer:find_param(pid_list, lconf, gconf, p_type, p_dim)
    if type(pid_list) == "string" then
        pid_list = {pid_list}
    end
    local pid_list_str = table.tostring(pid_list)
    for i, pid in ipairs(pid_list) do
        if lconf[pid] ~= nil then
            nerv.info("param [%s] of layer [%s] found in `layer_conf`.", pid, self.id)
            return lconf[pid]
        end
        local pid_g = self.id .. '_' .. pid -- globally unique identifier
        local pr = lconf.pr
        if pr ~= nil and pr:has_param(pid_g) then
            nerv.info("param [%s] of layer [%s] found in `layer_conf.pr`.", pid, self.id)
            return pr:get_param(pid_g)
        end
    end
    -- not provided anywhere: auto-generate using the first candidate id
    nerv.info("param [%s] of layer [%s] is not found in `layer_conf` or `layer_conf.pr`, " ..
                "switch to auto-generate", pid_list_str, self.id)
    local pid_g = self.id .. '_' .. pid_list[1]
    local p = p_type(pid_g, gconf)
    p.trans = gconf.cumat_type(unpack(p_dim))
    if type(gconf.param_random) ~= "function" then
        nerv.error("a param generate function is needed")
    end
    p.trans:generate(gconf.param_random)
    return p
end
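
-- A minimal sketch (not part of the toolkit) of a concrete layer built on the
-- interface above. It assumes `input`, `output`, `bp_err` and `next_bp_err`
-- are tables of matrices, one entry per port declared in `dim_in`/`dim_out`;
-- `nerv.DemoLayer` is hypothetical, and the matrix `mul` signature and the
-- `nerv.ParamRepo`/`nerv.LinearTransParam` usage are assumptions.
--[[
local DemoLayer = nerv.class('nerv.DemoLayer', 'nerv.Layer')

function DemoLayer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1) -- exactly one input and one output port
    -- reuse a provided/repo parameter, or let find_param auto-generate one
    self.ltp = self:find_param("ltp", layer_conf, global_conf,
                               nerv.LinearTransParam,
                               {self.dim_in[1], self.dim_out[1]})
end

function DemoLayer:init(batch_size)
    -- nothing to pre-allocate in this toy layer
end

function DemoLayer:propagate(input, output)
    -- output = input * ltp
    output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
end

function DemoLayer:back_propagate(bp_err, next_bp_err, input, output)
    -- next_bp_err = bp_err * ltp^T
    next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
end

function DemoLayer:update(bp_err, input, output)
    -- gradient of ltp is input^T * bp_err; hand it to the parameter
    local grad = self.gconf.cumat_type(self.dim_in[1], self.dim_out[1])
    grad:mul(input[1], bp_err[1], 1.0, 0.0, 'T', 'N')
    self.ltp:update(grad)
end

function DemoLayer:get_params()
    return nerv.ParamRepo({self.ltp})
end
--]]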
nerv.include('graph.lua')
nerv.include('affine.lua')
nerv.include('sigmoid.lua')
nerv.include('tanh.lua')
nerv.include('softmax_ce.lua')
nerv.include('bias.lua')
nerv.include('window.lua')
nerv.include('mse.lua')
nerv.include('combiner.lua')
nerv.include('softmax.lua')
nerv.include('elem_mul.lua')
nerv.include('lstm.lua')
nerv.include('lstm_gate.lua')
nerv.include('dropout.lua')
nerv.include('gru.lua')
nerv.include('rnn.lua')
nerv.include('duplicate.lua')
nerv.include('identity.lua')
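
-- A hedged usage sketch: constructing and initializing one of the layers
-- included above. The exact fields required in `gconf` and the layer
-- configuration depend on the concrete layer; the values below (learning
-- rate, matrix type, random initializer, dimensions, batch size) are
-- illustrative assumptions only.
--[[
local gconf = {
    lrate = 0.1,
    cumat_type = nerv.CuMatrixFloat,
    param_random = function() return math.random() / 5 - 0.1 end
}
local sigmoid = nerv.SigmoidLayer('sigmoid1', gconf,
                                  {dim_in = {429}, dim_out = {429}})
sigmoid:init(256) -- batch size
--]]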
-- The following lines are kept for backward compatibility and will be removed
-- in the future. The use of these names is deprecated.
nerv.DropoutLayerT = nerv.DropoutLayer
nerv.GRULayerT = nerv.GRULayer
nerv.LSTMLayerT = nerv.LSTMLayer
nerv.SoftmaxCELayerT = nerv.SoftmaxCELayer