author    Qi Liu <[email protected]>    2016-03-11 18:18:59 +0800
committer Qi Liu <[email protected]>    2016-03-11 18:18:59 +0800
commit    2f46a5e2b37a054f482f76f4ac3d26b144cf988f (patch)
tree      e442e76741d664a29924c5f0ec6cc72e87345539 /lua/select_linear.lua
parent    13729e83219cd90e33f329c49a50f6f4a4420721 (diff)
add lua
Diffstat (limited to 'lua/select_linear.lua')
-rw-r--r--    lua/select_linear.lua    62
1 file changed, 62 insertions, 0 deletions
diff --git a/lua/select_linear.lua b/lua/select_linear.lua
new file mode 100644
index 0000000..a7e20cc
--- /dev/null
+++ b/lua/select_linear.lua
@@ -0,0 +1,62 @@
+local SL = nerv.class('nerv.SelectLinearLayer', 'nerv.Layer')
+
+--id: string
+--global_conf: table
+--layer_conf: table
+--fetch the parameters from the configuration
+function SL:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+
+    self.vocab = layer_conf.vocab
+    self.ltp = self:find_param("ltp", layer_conf, global_conf, nerv.LinearTransParam, {self.vocab, self.dim_out[1]}) --layer_conf.ltp
+
+    self:check_dim_len(1, 1)
+end
+
+--check the dimensions and initialize training state
+function SL:init(batch_size)
+    if self.dim_in[1] ~= 1 then --the input is a single word id per frame
+        nerv.error("dimension of input should be 1 (a word id)")
+    end
+    if self.dim_out[1] ~= self.ltp.trans:ncol() then
+        nerv.error("mismatching dimensions of ltp and output")
+    end
+
+    self.batch_size = batch_size
+    self.ltp:train_init()
+end
+
+function SL:update(bp_err, input, output)
+    --use the loop below to produce reproducible results; don't forget to set the dropout to zero!
+    --for i = 1, input[1]:nrow(), 1 do
+    --    local word_vec = self.ltp.trans[input[1][i - 1][0]]
+    --    word_vec:add(word_vec, bp_err[1][i - 1], 1, -self.gconf.lrate / self.gconf.batch_size)
+    --end
+
+    --I tried the update_select_rows kernel, which uses atomicAdd, but it generates unreproducible results
+    self.ltp.trans:update_select_rows_by_colidx(bp_err[1], input[1], -self.gconf.lrate / self.gconf.batch_size, 0)
+    self.ltp.trans:add(self.ltp.trans, self.ltp.trans, 1.0, -self.gconf.lrate * self.gconf.wcost)
+end
+
+function SL:propagate(input, output)
+    --for i = 0, input[1]:ncol() - 1, 1 do
+    --    if input[1][0][i] > 0 then
+    --        output[1][i]:copy_fromd(self.ltp.trans[input[1][0][i]])
+    --    else
+    --        output[1][i]:fill(0)
+    --    end
+    --end
+    output[1]:copy_rows_fromd_by_colidx(self.ltp.trans, input[1])
+end
+
+function SL:back_propagate(bp_err, next_bp_err, input, output)
+    --the input is compressed (word ids, not dense vectors), so there is no gradient to pass back
+end
+
+function SL:get_params()
+    local paramRepo = nerv.ParamRepo({self.ltp})
+    return paramRepo
+end
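For context, a minimal sketch of how this layer might be instantiated and driven. The layer id, the dim_out/vocab sizes, the lrate/wcost/batch_size values, and the use of nerv.CuMatrixFloat as the matrix backend are all illustrative assumptions, not part of this commit:

--a hypothetical global_conf; SL:update() reads lrate, wcost and batch_size from it
local gconf = {
    lrate = 0.1,
    wcost = 1e-6,
    batch_size = 32,
}
--dim_in = {1}: one word id per frame; dim_out = {300} and vocab = 10000 are made-up sizes;
--whether find_param can auto-generate ltp from this bare gconf depends on the nerv version,
--normally ltp would come from a ParamRepo
local layer = nerv.SelectLinearLayer("select_linear1", gconf,
    {dim_in = {1}, dim_out = {300}, vocab = 10000})
layer:init(gconf.batch_size)

--input[1] is a 1 x batch_size matrix of word ids (the layer indexes by column);
--output[1] is a batch_size x dim_out matrix that receives the selected embedding rows
local input = {nerv.CuMatrixFloat(1, gconf.batch_size)}
local output = {nerv.CuMatrixFloat(gconf.batch_size, 300)}
layer:propagate(input, output)

The point of the sketch is the call contract: word ids arrive as a single row of column indices, and propagate materializes the corresponding rows of the embedding table directly, avoiding a dense one-hot matrix multiply.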