author    Ted Yin <Determinant@users.noreply.github.com>    2016-03-31 20:44:17 +0800
committer Ted Yin <Determinant@users.noreply.github.com>    2016-03-31 20:44:17 +0800
commit    7829db926609d3e0498349e1a09634531244e0e5 (patch)
tree      08b2efe303b3aa8da3199641106419641296d8fc /nerv/examples/ptb/select_linear.lua
parent    89d57b6fae6bcb0195a73fb97ab6870ee0d0ce20 (diff)
parent    74d6956dc79b387289d911d9cbea5b7245405b62 (diff)
Merge pull request #37 from liuq901/master
add general trainer
Diffstat (limited to 'nerv/examples/ptb/select_linear.lua')
-rw-r--r--    nerv/examples/ptb/select_linear.lua    63
1 file changed, 63 insertions, 0 deletions
diff --git a/nerv/examples/ptb/select_linear.lua b/nerv/examples/ptb/select_linear.lua
new file mode 100644
index 0000000..42778f8
--- /dev/null
+++ b/nerv/examples/ptb/select_linear.lua
@@ -0,0 +1,63 @@
+local SL = nerv.class('nerv.SelectLinearLayer', 'nerv.Layer')
+
+--id: string, layer id
+--global_conf: table, the global configuration
+--layer_conf: table, must contain vocab (vocabulary size) in addition to dim_in/dim_out
+--constructor: checks dimensions and binds the ltp parameter
+function SL:__init(id, global_conf, layer_conf)
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
+
+ self.vocab = layer_conf.vocab
+
+ self:check_dim_len(1, 1)
+ self:bind_params()
+end
+
+function SL:bind_params()
+ self.ltp = self:find_param("ltp", self.lconf, self.gconf, nerv.LinearTransParam, {self.vocab, self.dim_out[1]}) --ltp: the vocab x dim_out[1] word embedding matrix (layer_conf.ltp)
+end
+
+--check parameter dimensions and prepare for training
+function SL:init(batch_size)
+ if (self.dim_in[1] ~= 1) then --one word id
+ nerv.error("mismatching dimensions of ltp and input")
+ end
+ if (self.dim_out[1] ~= self.ltp.trans:ncol()) then
+ nerv.error("mismatching dimensions of ltp and output")
+ end
+
+ self.batch_size = batch_size
+ self.ltp:train_init()
+end
+
+function SL:update()
+ --use this to produce a reproducible result; don't forget to set the dropout to zero!
+ --for i = 1, input[1]:nrow(), 1 do
+ -- local word_vec = self.ltp.trans[input[1][i - 1][0]]
+ -- word_vec:add(word_vec, bp_err[1][i - 1], 1, - self.gconf.lrate / self.gconf.batch_size)
+ --end
+
+ --I tried the update_select_rows kernel, which uses atomicAdd, but it generates unreproducible results
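+ --dense update of ltp using the err/input pair recorded in back_propagate below (no atomic ops, hence reproducible)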
+ self.ltp:update_by_err_input()
+end
+
+function SL:propagate(input, output)
+ --for i = 0, input[1]:ncol() - 1, 1 do
+ -- if (input[1][0][i] > 0) then
+ -- output[1][i]:copy_fromd(self.ltp.trans[input[1][0][i]])
+ -- else
+ -- output[1][i]:fill(0)
+ -- end
+ --end
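+ --batched equivalent of the commented loop above: copy the ltp row for each word id in input[1] into output[1]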
+ output[1]:copy_rows_fromd_by_colidx(self.ltp.trans, input[1])
+end
+
+function SL:back_propagate(bp_err, next_bp_err, input, output)
+ --the input holds compressed word ids, so no error is propagated further back; only accumulate the gradient for ltp
+ self.ltp:back_propagate_by_err_input(bp_err[1], input[1]:decompress(self.vocab))
+end
+
+function SL:get_params()
+ local paramRepo = nerv.ParamRepo({self.ltp}, self.loc_type)
+ return paramRepo
+end
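
As a usage note (not part of the commit): under the usual nerv.Layer conventions, dim_in/dim_out and a ParamRepo travel through layer_conf. The sketch below is a hypothetical construction example; the gconf table, the param_repo object and the pr field name are assumptions inferred from how find_param and init are used above, and the sizes are arbitrary.

--hypothetical construction sketch, not from the repository
local layer = nerv.SelectLinearLayer("select_linear0", gconf,
        {dim_in = {1},       -- one word id per frame
         dim_out = {300},    -- embedding dimension (ltp columns)
         vocab = 10000,      -- vocabulary size (ltp rows)
         pr = param_repo})   -- ParamRepo searched by find_param in bind_params (assumed field name)
layer:init(gconf.batch_size) -- dimension checks + ltp:train_init()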