author    cloudygoose <cloudygooseg@gmail.com>    2015-08-11 12:02:36 +0800
committer cloudygoose <cloudygooseg@gmail.com>    2015-08-11 12:02:36 +0800
commit    7e82fb775e314ab1c05d31fdd409177b3fe53a2a (patch)
tree      dab2a3cf0ef5ca5ece49a356f06f51253cc4d032
parent    9cb04041b1f1aabfd67480140caa56325b95b9ad (diff)
...
-rw-r--r--    nerv/examples/lmptb/lmptb/layer/select_linear.lua    58
-rwxr-xr-x    nerv/examples/lmptb/qsub-run.sh                       1
-rwxr-xr-x    nerv/examples/lmptb/qsub.sh                           1
3 files changed, 60 insertions, 0 deletions
diff --git a/nerv/examples/lmptb/lmptb/layer/select_linear.lua b/nerv/examples/lmptb/lmptb/layer/select_linear.lua
new file mode 100644
index 0000000..4798536
--- /dev/null
+++ b/nerv/examples/lmptb/lmptb/layer/select_linear.lua
@@ -0,0 +1,58 @@
+local SL = nerv.class('nerv.SelectLinearLayer', 'nerv.Layer')
+
+--id: string
+--global_conf: table
+--layer_conf: table
+--Get the parameters (ltp and vocab) from layer_conf
+function SL:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+
+    self.ltp = layer_conf.ltp
+    self.vocab = layer_conf.vocab
+
+    self:check_dim_len(1, 1) --exactly one input and one output
+end
+
+--Check parameter and input/output dimensions
+function SL:init(batch_size)
+    if (self.dim_in[1] ~= 1) then --the input is one word id per frame
+        nerv.error("dimension of input should be 1 (a word id)")
+    end
+    if (self.dim_out[1] ~= self.ltp.trans:ncol()) then
+        nerv.error("mismatching dimensions of ltp and output")
+    end
+
+    self.batch_size = batch_size
+    self.ltp:train_init()
+end
+
+function SL:update(bp_err, input, output)
+    for i = 1, input[1]:nrow(), 1 do
+        if (input[1][i - 1][0] ~= 0) then --no update for word id 0 (it propagates as all zeros)
+            local word_vec = self.ltp.trans[input[1][i - 1][0] - 1] --the row selected for this word
+            word_vec:add(word_vec, bp_err[1][i - 1], 1, - self.gconf.lrate / self.gconf.batch_size) --SGD step on that row only
+        end
+    end
+end
+
+function SL:propagate(input, output)
+    for i = 0, input[1]:nrow() - 1, 1 do
+        if (input[1][i][0] > 0) then
+            output[1][i]:copy_fromd(self.ltp.trans[input[1][i][0] - 1]) --select the row of ltp.trans for this word id
+        else
+            output[1][i]:fill(0) --word id 0 maps to an all-zero vector
+        end
+    end
+end
+
+function SL:back_propagate(bp_err, next_bp_err, input, output)
+    --the input is a word id rather than a dense vector, so there is no gradient to pass back
+end
+
+function SL:get_params()
+    local paramRepo = nerv.ParamRepo({self.ltp})
+    return paramRepo
+end
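
The new layer is effectively an embedding lookup: propagate copies one row of ltp.trans per word id, and update applies SGD only to the rows that were selected. Below is a minimal sketch of the same select-and-update semantics on plain Lua tables; the names lookup, word_ids, grads and lrate are illustrative, not nerv API, and 1-based Lua tables stand in for nerv's 0-based matrices.

    -- Sketch of nerv.SelectLinearLayer's semantics on plain Lua tables.
    local lookup = {                 -- one row per word, like ltp.trans
        {0.1, 0.2}, {0.3, 0.4}, {0.5, 0.6},
    }
    local dim = #lookup[1]

    -- propagate: word id w selects a copy of row w; id 0 yields zeros
    local function propagate(word_ids)
        local out = {}
        for i, w in ipairs(word_ids) do
            out[i] = {}
            for j = 1, dim do
                out[i][j] = (w > 0) and lookup[w][j] or 0
            end
        end
        return out
    end

    -- update: plain SGD, but only on the rows that were selected
    local function update(word_ids, grads, lrate)
        for i, w in ipairs(word_ids) do
            if w ~= 0 then
                for j = 1, dim do
                    lookup[w][j] = lookup[w][j] - lrate * grads[i][j]
                end
            end
        end
    end

    print(propagate({2, 0, 3})[1][1])  --> 0.3 (first column of row 2)
    update({2, 0, 3}, {{1, 1}, {0, 0}, {1, 1}}, 0.1)
    print(lookup[2][1])                --> 0.2 (0.3 - 0.1 * 1)

Because only the selected rows are touched, an update costs O(batch_size * dim) rather than O(vocab * dim), which is the point of a dedicated select layer instead of a dense one-hot matrix product.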
diff --git a/nerv/examples/lmptb/qsub-run.sh b/nerv/examples/lmptb/qsub-run.sh
new file mode 100755
index 0000000..3973387
--- /dev/null
+++ b/nerv/examples/lmptb/qsub-run.sh
@@ -0,0 +1 @@
+./nerv main.lua
diff --git a/nerv/examples/lmptb/qsub.sh b/nerv/examples/lmptb/qsub.sh
new file mode 100755
index 0000000..ed795f0
--- /dev/null
+++ b/nerv/examples/lmptb/qsub.sh
@@ -0,0 +1 @@
+qsub -cwd -P gpu2.p -l gpu=1,hostname=cambridge -o ../../../logs/LOG_$(date +%Y%m%d_%H%M%S) -j y ./qsub-run.sh
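
For reference, the flags in qsub.sh are standard (Sun) Grid Engine options: -cwd runs the job from the submission directory, -P gpu2.p selects the project, -l gpu=1,hostname=cambridge requests one GPU on the host named cambridge, -o redirects the job's stdout to a timestamped file under ../../../logs/, and -j y merges stderr into the same stream, so each run leaves a single log file.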