author    txh18 <[email protected]>  2015-11-24 15:37:02 +0800
committer txh18 <[email protected]>  2015-11-24 15:37:02 +0800
commit    914a026734db6608e04987e9fcec9c82612e8673
tree      89e4db2987b6c757667aef96cc41974fda302927 /nerv/examples/lmptb/tnn_ptb_main.lua
parent    f829b2b49d1db7fb6a49109722b9c7a41ae9324a
added wcost for biasparam in lm_trainer
Diffstat (limited to 'nerv/examples/lmptb/tnn_ptb_main.lua')
-rw-r--r--  nerv/examples/lmptb/tnn_ptb_main.lua | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua
index 66c7317..9156b61 100644
--- a/nerv/examples/lmptb/tnn_ptb_main.lua
+++ b/nerv/examples/lmptb/tnn_ptb_main.lua
@@ -172,7 +172,7 @@ test_fn = data_dir .. '/ptb.test.txt.adds'
vocab_fn = data_dir .. '/vocab'
global_conf = {
- lrate = 1, wcost = 1e-5, momentum = 0,
+ lrate = 1, wcost = 1e-6, momentum = 0,
cumat_type = nerv.CuMatrixFloat,
mmat_type = nerv.MMatrixFloat,
nn_act_default = 0,
@@ -181,7 +181,7 @@ global_conf = {
chunk_size = 15,
batch_size = 10,
max_iter = 35,
- decay_iter = 16,
+ decay_iter = 15,
param_random = function() return (math.random() / 5 - 0.1) end,
train_fn = train_fn,
@@ -267,7 +267,7 @@ else
printf("%s not user setting, all default...\n", global_conf.sche_log_pre)
end
-global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate
+global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost
global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
global_conf.param_fn = global_conf.work_dir .. "/params"
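
The first hunk's only functional change is the weight-decay coefficient wcost, lowered from 1e-5 to 1e-6. As a rough sketch of what a wcost-style L2 penalty does in a plain SGD step (this update rule is illustrative only; nerv's actual updater operates on CuMatrixFloat/MMatrixFloat objects and is not part of this diff):

-- Minimal sketch, not nerv's real updater: wcost as an L2
-- weight-decay coefficient in a scalar SGD step. 'param' and
-- 'grad' are plain Lua tables of numbers for illustration.
local function sgd_step(param, grad, lrate, wcost)
    for i = 1, #param do
        -- decay pulls each weight toward zero in proportion to wcost;
        -- dropping wcost from 1e-5 to 1e-6 weakens that pull tenfold
        param[i] = param[i] - lrate * (grad[i] + wcost * param[i])
    end
end

-- tiny usage example
local w, g = {0.5, -0.2}, {0.1, 0.0}
sgd_step(w, g, 1, 1e-6)
print(w[1], w[2])  --> 0.3999995  -0.1999998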
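The second hunk moves decay_iter from 16 to 15, so learning-rate decay starts one iteration earlier. Below is a hypothetical halving schedule keyed on decay_iter, purely to illustrate the knob; the real scheduling logic lives in the lmptb trainer code, not in this file, and may differ:

-- Hypothetical schedule, assuming decay_iter marks the iteration
-- after which the learning rate is halved once per iteration.
local function scheduled_lrate(base_lrate, iter, decay_iter)
    if iter <= decay_iter then
        return base_lrate
    end
    return base_lrate * 0.5 ^ (iter - decay_iter)
end

print(scheduled_lrate(1, 15, 15)) --> 1
print(scheduled_lrate(1, 17, 15)) --> 0.25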
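The last hunk appends the wcost value to the work-directory name, so runs that differ only in weight decay no longer write into the same directory. With hypothetical values for the fields set elsewhere in this file (hidden_size and work_dir_base are not shown in this diff), the concatenation yields:

-- Illustration of the new work_dir suffix; the field values here
-- are made-up stand-ins for settings defined elsewhere in the file.
local gc = { work_dir_base = '/tmp/ptb', hidden_size = 300,
             chunk_size = 15, batch_size = 10, lrate = 1, wcost = 1e-6 }
print(gc.work_dir_base .. 'h' .. gc.hidden_size .. 'ch' .. gc.chunk_size
      .. 'ba' .. gc.batch_size .. 'slr' .. gc.lrate .. 'wc' .. gc.wcost)
--> /tmp/ptbh300ch15ba10slr1wc1e-06

Note that Lua stringifies 1e-6 as "1e-06", which is what ends up in the directory name.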