path: root/lua/select_linear.lua
author    Qi Liu <[email protected]>  2016-03-09 11:58:13 +0800
committer Qi Liu <[email protected]>  2016-03-09 11:58:13 +0800
commit    05fcde5bf0caa1ceb70fef02fc88eda6f00c5ed5 (patch)
tree      a3bfb245d3f106525ec2ff4f987848fcd3f56217 /lua/select_linear.lua
parent    4e56b863203ab6919192efe973ba9f8ee0d5ac65 (diff)
add recipe
Diffstat (limited to 'lua/select_linear.lua')
-rw-r--r--  lua/select_linear.lua  |  62
1 file changed, 62 insertions(+), 0 deletions(-)
diff --git a/lua/select_linear.lua b/lua/select_linear.lua
new file mode 100644
index 0000000..a7e20cc
--- /dev/null
+++ b/lua/select_linear.lua
@@ -0,0 +1,62 @@
+local SL = nerv.class('nerv.SelectLinearLayer', 'nerv.Layer')
+
+--id: string
+--global_conf: table
+--layer_conf: table
+--Constructor: reads the configuration and fetches the lookup-table parameter (ltp)
+function SL:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+
+    self.vocab = layer_conf.vocab
+    self.ltp = self:find_param("ltp", layer_conf, global_conf, nerv.LinearTransParam, {self.vocab, self.dim_out[1]}) --layer_conf.ltp
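+    --ltp.trans is a (vocab x dim_out[1]) matrix; row i holds the word vector for word id i (see SL:propagate)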
+
+    self:check_dim_len(1, 1)
+end
+
+--Check the parameter dimensions and initialize training
+function SL:init(batch_size)
+    if (self.dim_in[1] ~= 1) then --the input is a single word id
+        nerv.error("dimension of input should be 1 (a word id)")
+    end
+    if (self.dim_out[1] ~= self.ltp.trans:ncol()) then
+        nerv.error("mismatching dimensions of ltp and output")
+    end
+
+    self.batch_size = batch_size
+    self.ltp:train_init()
+end
+
+function SL:update(bp_err, input, output)
+    --use this loop instead to produce reproducible results; don't forget to set the dropout rate to zero!
+    --for i = 1, input[1]:nrow(), 1 do
+    --    local word_vec = self.ltp.trans[input[1][i - 1][0]]
+    --    word_vec:add(word_vec, bp_err[1][i - 1], 1, - self.gconf.lrate / self.gconf.batch_size)
+    --end
+
+    --the update_select_rows kernel that uses atomicAdd was also tried, but it produces non-reproducible results
+    self.ltp.trans:update_select_rows_by_colidx(bp_err[1], input[1], - self.gconf.lrate / self.gconf.batch_size, 0)
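+    --weight decay on the whole table: trans = (1 - lrate * wcost) * trans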
+    self.ltp.trans:add(self.ltp.trans, self.ltp.trans, 1.0, - self.gconf.lrate * self.gconf.wcost)
+end
+
+function SL:propagate(input, output)
+    --for i = 0, input[1]:ncol() - 1, 1 do
+    --    if (input[1][0][i] > 0) then
+    --        output[1][i]:copy_fromd(self.ltp.trans[input[1][0][i]])
+    --    else
+    --        output[1][i]:fill(0)
+    --    end
+    --end
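+    --batched version of the loop above: copy the rows of ltp.trans selected by the word ids in input[1]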
+    output[1]:copy_rows_fromd_by_colidx(self.ltp.trans, input[1])
+end
+
+function SL:back_propagate(bp_err, next_bp_err, input, output)
+    --the input holds word ids rather than dense activations, so there is no gradient to pass back
+end
+
+function SL:get_params()
+    local paramRepo = nerv.ParamRepo({self.ltp})
+    return paramRepo
+end
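
A minimal usage sketch of the layer, following the constructor signature of SL:__init above. The layer id, hyper-parameter values and dimensions are illustrative assumptions, and find_param is assumed to be able to locate or allocate the ltp parameter from the given tables:

local gconf = {lrate = 0.1, wcost = 1e-6, batch_size = 32} --hypothetical hyper-parameters
local sl = nerv.SelectLinearLayer("sl1", gconf,
    {dim_in = {1},    --one word id per input frame
     dim_out = {300}, --size of each word vector
     vocab = 10000})  --number of rows in ltp.trans
sl:init(gconf.batch_size)
--per frame, propagate reads a word id from input[1] and writes the matching row of ltp.trans to output[1]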