local SL = nerv.class('nerv.SelectLinearLayer', 'nerv.Layer')
--id: string
--global_conf: table
--layer_conf: table
--Read the lookup-table parameter and configuration from layer_conf
function SL:__init(id, global_conf, layer_conf)
    self.id = id
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self.gconf = global_conf
    self.ltp = layer_conf.ltp --lookup table: one row of ltp.trans per word id
    self.vocab = layer_conf.vocab
    self:check_dim_len(1, 1) --exactly one input and one output
end
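
--Illustrative layer_conf layout (a sketch inferred from how the fields are
--used in this file; the concrete values and names are assumptions):
--  {
--      dim_in  = {1},          --one word id per frame
--      dim_out = {emb_size},   --must equal ltp.trans:ncol()
--      ltp     = ltp,          --parameter whose .trans matrix has one row per word id
--      vocab   = vocab,        --stored on the layer, not used elsewhere in this file
--  }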
--Check dimensions and prepare the parameter for training
function SL:init(batch_size)
    if (self.dim_in[1] ~= 1) then --the input is a single word id per frame
        nerv.error("mismatching dimensions of ltp and input")
    end
    if (self.dim_out[1] ~= self.ltp.trans:ncol()) then
        nerv.error("mismatching dimensions of ltp and output")
    end
    self.batch_size = batch_size
    self.ltp:train_init()
end
function SL:update(bp_err, input, output)
    --earlier row-wise implementation, kept for reference:
    --for i = 1, input[1]:ncol(), 1 do
    --    if (input[1][0][i - 1] ~= 0) then
    --        local word_vec = self.ltp.trans[input[1][0][i - 1]]
    --        word_vec:add(word_vec, bp_err[1][i - 1], 1, - self.gconf.lrate / self.gconf.batch_size)
    --    end
    --end
    --accumulate the gradient into the rows of the lookup table selected by the input word ids
    self.ltp.trans:update_select_rows(bp_err[1], input[1]:trans(), - self.gconf.lrate / self.gconf.batch_size, 0)
    --apply weight decay to the whole lookup table
    self.ltp.trans:add(self.ltp.trans, self.ltp.trans, 1.0, - self.gconf.lrate * self.gconf.wcost / self.gconf.batch_size)
end
function SL:propagate(input, output)
    --earlier row-wise implementation, kept for reference:
    --for i = 0, input[1]:ncol() - 1, 1 do
    --    if (input[1][0][i] > 0) then
    --        output[1][i]:copy_fromd(self.ltp.trans[input[1][0][i]])
    --    else
    --        output[1][i]:fill(0)
    --    end
    --end
    --copy, for every frame in the batch, the lookup-table row indexed by its word id
    output[1]:copy_rows_fromd_by_idx(self.ltp.trans, input[1]:trans())
end
function SL:back_propagate(bp_err, next_bp_err, input, output)
    --the input is compressed word ids (not one-hot vectors), so there is nothing to propagate back
end
function SL:get_params()
    local paramRepo = nerv.ParamRepo({self.ltp})
    return paramRepo
end
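
--Usage sketch (an assumption, not part of this file): a SelectLinearLayer is
--driven like any other nerv.Layer; names such as gconf, ltp, emb_size and the
--exact matrix shapes below are hypothetical.
--  local layer = nerv.SelectLinearLayer("select", gconf,
--      {dim_in = {1}, dim_out = {emb_size}, ltp = ltp, vocab = vocab})
--  layer:init(gconf.batch_size)
--  --input[1]: single-row matrix of word ids, one column per frame
--  --output[1]: one embedding row per frame, dim_out[1] columns
--  layer:propagate(input, output)
--  layer:update(bp_err, input, output)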