diff options
-rw-r--r-- | nerv/examples/lmptb/bilstmlm_ptb_main.lua | 6 |
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/nerv/examples/lmptb/bilstmlm_ptb_main.lua b/nerv/examples/lmptb/bilstmlm_ptb_main.lua index 078d036..45a724b 100644 --- a/nerv/examples/lmptb/bilstmlm_ptb_main.lua +++ b/nerv/examples/lmptb/bilstmlm_ptb_main.lua @@ -212,7 +212,7 @@ vocab_fn = data_dir .. '/vocab' qdata_dir = root_dir .. '/ptb/questionGen/gen' global_conf = { - lrate = 0.15, wcost = 1e-5, momentum = 0, clip_t = 5, + lrate = 0.015, wcost = 1e-5, momentum = 0, clip_t = 5, cumat_type = nerv.CuMatrixFloat, mmat_type = nerv.MMatrixFloat, nn_act_default = 0, @@ -225,7 +225,7 @@ global_conf = { lr_decay = 1.003, decay_iter = 10, param_random = function() return (math.random() / 5 - 0.1) end, - dropout_str = "0.5", + dropout_str = "0", train_fn = train_fn, valid_fn = valid_fn, @@ -365,7 +365,7 @@ else nerv.printf("%s no user setting, all default...\n", global_conf.sche_log_pre) end -global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'l' .. global_conf.layer_num --.. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost .. 'dr' .. global_conf.dropout_str +global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'l' .. global_conf.layer_num .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost .. 'dr' .. global_conf.dropout_str global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf' global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak' global_conf.param_fn = global_conf.work_dir .. "/params" |