local ProjectionLayer = nerv.class('nerv.ProjectionLayer', 'nerv.Layer')

--- The constructor.
function ProjectionLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
    self:bind_params()
end

--- Bind one linear transform parameter per input stream.
function ProjectionLayer:bind_params()
    local lconf = self.lconf
    -- accept the shorthand "no_update_ltp" as an alias for "no_update_ltp1"
    lconf.no_update_ltp1 = lconf.no_update_ltp1 or lconf.no_update_ltp
    for i = 1, #self.dim_in do
        local pid = "ltp" .. i
        -- for the first input, also accept the plain parameter id "ltp"
        local pid_list = i == 1 and {pid, "ltp"} or pid
        self["ltp" .. i] = self:find_param(pid_list, lconf, self.gconf,
                                           nerv.LinearTransParam,
                                           {self.dim_in[i], self.dim_out[1]})
        -- freeze this parameter if its own flag or the layer-wide flag is set
        local no_update = lconf["no_update_ltp" .. i]
        if (no_update ~= nil) and no_update or lconf.no_update_all then
            self["ltp" .. i].no_update = true
        end
    end
    self.ltp = self.ltp1 -- alias of ltp1
end

--- Check parameter dimensions against the layer configuration and
-- prepare each parameter for training.
function ProjectionLayer:init(batch_size)
    for i = 1, #self.dim_in do
        if self.dim_in[i] ~= self["ltp" .. i].trans:nrow() then
            nerv.error("mismatching dimensions of linear transform parameter and input")
        end
        if self.dim_out[1] ~= self["ltp" .. i].trans:ncol() then
            nerv.error("mismatching dimensions of linear transform parameter and output")
        end
        self["ltp" .. i]:train_init()
    end
end

function ProjectionLayer:batch_resize(batch_size)
    -- do nothing
end

--- Update each linear transform from the error input accumulated
-- during back-propagation.
function ProjectionLayer:update()
    for i = 1, #self.dim_in do
        self["ltp" .. i]:update_by_err_input()
    end
end

--- Forward pass: output[1] = sum_i input[i] * ltp_i, i.e. a linear
-- transform without bias, summed over all input streams.
function ProjectionLayer:propagate(input, output)
    -- apply the first linear transform, overwriting the output (beta = 0.0)
    output[1]:mul(input[1], self.ltp1.trans, 1.0, 0.0, 'N', 'N')
    -- accumulate the remaining transforms into the output (beta = 1.0)
    for i = 2, #self.dim_in do
        output[1]:mul(input[i], self["ltp" .. i].trans, 1.0, 1.0, 'N', 'N')
    end
end

--- Backward pass: propagate the error to each input stream
-- (next_bp_err[i] = bp_err[1] * ltp_i^T) and accumulate the gradient
-- of each linear transform.
function ProjectionLayer:back_propagate(bp_err, next_bp_err, input, output)
    for i = 1, #self.dim_in do
        next_bp_err[i]:mul(bp_err[1], self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T')
        self["ltp" .. i]:back_propagate_by_err_input(bp_err[1], input[i])
    end
end

--- Collect all linear transform parameters into a parameter repo.
function ProjectionLayer:get_params()
    local pr = nerv.ParamRepo({self.ltp1}, self.loc_type)
    for i = 2, #self.dim_in do
        pr:add(self["ltp" .. i])
    end
    return pr
end
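
--- Usage sketch (a minimal illustration, not part of this file): how a
-- ProjectionLayer might be constructed and driven. The layer id, the
-- dimensions and the contents of `gconf`, `input` and `output` below are
-- assumptions made for the example, not values this file defines.
--
--   -- a projection layer merging two 429-dimensional input streams into
--   -- a single 2048-dimensional output:
--   local layer = nerv.ProjectionLayer("proj1", gconf,
--                                      {dim_in = {429, 429},
--                                       dim_out = {2048}})
--   layer:init(gconf.batch_size)
--   layer:propagate(input, output) -- output[1] = input[1]*ltp1 + input[2]*ltp2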