Diffstat (limited to 'lua/select_linear.lua')
-rw-r--r--  lua/select_linear.lua | 62
1 file changed, 0 insertions(+), 62 deletions(-)
diff --git a/lua/select_linear.lua b/lua/select_linear.lua
deleted file mode 100644
index a7e20cc..0000000
--- a/lua/select_linear.lua
+++ /dev/null
@@ -1,62 +0,0 @@
-local SL = nerv.class('nerv.SelectLinearLayer', 'nerv.Layer')
-
---id: string
---global_conf: table
---layer_conf: table
---Fetch the layer's parameters
-function SL:__init(id, global_conf, layer_conf)
- self.id = id
- self.dim_in = layer_conf.dim_in
- self.dim_out = layer_conf.dim_out
- self.gconf = global_conf
-
- self.vocab = layer_conf.vocab
- self.ltp = self:find_param("ltp", layer_conf, global_conf, nerv.LinearTransParam, {self.vocab, self.dim_out[1]}) --the embedding matrix, one row per vocabulary word
-
- self:check_dim_len(1, 1)
-end
-
---Check dimensions and initialize training
-function SL:init(batch_size)
- if (self.dim_in[1] ~= 1) then --one word id
- nerv.error("mismatching dimensions of ltp and input")
- end
- if (self.dim_out[1] ~= self.ltp.trans:ncol()) then
- nerv.error("mismatching dimensions of ltp and output")
- end
-
- self.batch_size = batch_size
- self.ltp:train_init()
-end
-
-function SL:update(bp_err, input, output)
- --the commented-out loop below produces reproducible results; don't forget to set the dropout to zero!
- --for i = 1, input[1]:nrow(), 1 do
- -- local word_vec = self.ltp.trans[input[1][i - 1][0]]
- -- word_vec:add(word_vec, bp_err[1][i - 1], 1, - self.gconf.lrate / self.gconf.batch_size)
- --end
-
- --I tried the update_select_rows kernel, which uses atomicAdd, but it generates unreproducible results
- self.ltp.trans:update_select_rows_by_colidx(bp_err[1], input[1], - self.gconf.lrate / self.gconf.batch_size, 0)
- self.ltp.trans:add(self.ltp.trans, self.ltp.trans, 1.0, - self.gconf.lrate * self.gconf.wcost) --L2 weight decay on the embedding matrix
-end
-
-function SL:propagate(input, output)
- --for i = 0, input[1]:ncol() - 1, 1 do
- -- if (input[1][0][i] > 0) then
- -- output[1][i]:copy_fromd(self.ltp.trans[input[1][0][i]])
- -- else
- -- output[1][i]:fill(0)
- -- end
- --end
- output[1]:copy_rows_fromd_by_colidx(self.ltp.trans, input[1]) --embedding lookup: copy the ltp rows selected by the input word ids
-end
-
-function SL:back_propagate(bp_err, next_bp_err, input, output)
- --the input is a compressed word-id vector, so there is no gradient to pass back
-end
-
-function SL:get_params()
- local paramRepo = nerv.ParamRepo({self.ltp})
- return paramRepo
-end
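
For reference, the two device kernels the deleted layer relied on reduce to simple row operations on the embedding matrix. The following is a minimal plain-Lua sketch of their semantics, with nested tables standing in for device matrices and hypothetical helper names (lookup_rows, update_rows); it is an illustration, not the nerv CUDA implementation.

--forward lookup: output[i] is a copy of the ltp row selected by word_ids[i]
--(the semantics of copy_rows_fromd_by_colidx)
local function lookup_rows(ltp, word_ids)
    local output = {}
    for i = 1, #word_ids do
        local row = {}
        for j = 1, #ltp[word_ids[i]] do
            row[j] = ltp[word_ids[i]][j]
        end
        output[i] = row
    end
    return output
end

--backward update: ltp[word_ids[i]] = ltp[word_ids[i]] + alpha * err[i],
--visiting rows in a fixed order so repeated word ids accumulate
--deterministically (what the atomicAdd-based kernel could not guarantee)
local function update_rows(ltp, err, word_ids, alpha)
    for i = 1, #word_ids do
        local row = ltp[word_ids[i]]
        for j = 1, #row do
            row[j] = row[j] + alpha * err[i][j]
        end
    end
end

Because floating-point addition is not associative, an atomicAdd kernel that lets threads race on the same row may sum gradients in a different order on every run; a fixed visiting order is what makes update_select_rows_by_colidx reproducible.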