path: root/nerv/examples/lmptb/lstmlm_ptb_main.lua
author    txh18 <cloudygooseg@gmail.com>    2015-12-02 18:00:47 +0800
committer txh18 <cloudygooseg@gmail.com>    2015-12-02 18:00:47 +0800
commit    094fc872d3e62c5f0950ac1747f130e30a08bee8 (patch)
tree      2bb0c9df22c5899d9af4062f16c11261f23302dd /nerv/examples/lmptb/lstmlm_ptb_main.lua
parent    41a841f3e0992a578cf5c8f82ae44a552a6f8b2f (diff)
added dropout_t layer
Diffstat (limited to 'nerv/examples/lmptb/lstmlm_ptb_main.lua')
-rw-r--r--  nerv/examples/lmptb/lstmlm_ptb_main.lua  9
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/nerv/examples/lmptb/lstmlm_ptb_main.lua b/nerv/examples/lmptb/lstmlm_ptb_main.lua
index 24db06c..4c46369 100644
--- a/nerv/examples/lmptb/lstmlm_ptb_main.lua
+++ b/nerv/examples/lmptb/lstmlm_ptb_main.lua
@@ -84,6 +84,10 @@ function prepare_layers(global_conf)
["nerv.LSTMLayerT"] = {
["lstmL1"] = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size, global_conf.hidden_size}, ["pr"] = pr}},
},
+
+ ["nerv.DropoutLayerT"] = {
+ ["dropoutL1"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}},
+ },
["nerv.SelectLinearLayer"] = {
["selectL1"] = {{}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab, ["pr"] = pr}},
@@ -137,7 +141,8 @@ function prepare_tnn(global_conf, layerRepo)
{"selectL1[1]", "lstmL1[1]", 0},
{"lstmL1[2]", "lstmL1[3]", 1},
- {"lstmL1[1]", "combinerL1[1]", 0},
+ {"lstmL1[1]", "dropoutL1[1]", 0},
+ {"dropoutL1[1]", "combinerL1[1]", 0},
{"combinerL1[1]", "lstmL1[2]", 1},
{"combinerL1[2]", "outputL[1]", 0},
@@ -279,7 +284,7 @@ else
printf("%s no user setting, all default...\n", global_conf.sche_log_pre)
end
-global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost
+global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size --.. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost
global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
global_conf.param_fn = global_conf.work_dir .. "/params"
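
The last hunk comments out everything after the hidden size in the working-directory name, so runs that differ only in chunk size, batch size, learning rate, or weight cost now share a directory (and hence the shuffled training file and params paths derived from it above). With illustrative values (work_dir_base is configured elsewhere in the script, the placeholder path below is not the real default), the effect on the name is:

-- Illustrative values only.
local gc = {work_dir_base = './work/ptb_lstm', hidden_size = 300,
            chunk_size = 15, batch_size = 10, lrate = 1, wcost = 1e-5}
-- old name: ./work/ptb_lstmh300ch15ba10slr1wc1e-05
-- new name: ./work/ptb_lstmh300
print(gc.work_dir_base .. 'h' .. gc.hidden_size)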