From b4207a46686e899b797e70f0ace352107bbc0d54 Mon Sep 17 00:00:00 2001
From: txh18
Date: Fri, 27 Nov 2015 22:24:22 +0800
Subject: added clip_t for tnn

---
 nerv/examples/lmptb/lstmlm_ptb_main.lua | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

(limited to 'nerv/examples/lmptb/lstmlm_ptb_main.lua')

diff --git a/nerv/examples/lmptb/lstmlm_ptb_main.lua b/nerv/examples/lmptb/lstmlm_ptb_main.lua
index 42b541f..69f26f5 100644
--- a/nerv/examples/lmptb/lstmlm_ptb_main.lua
+++ b/nerv/examples/lmptb/lstmlm_ptb_main.lua
@@ -153,8 +153,9 @@ function prepare_tnn(global_conf, layerRepo)
     end
     ]]--
 
-    local tnn = nerv.TNN("TNN", global_conf, {["dim_in"] = {1, global_conf.vocab:size()}, ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
-        ["connections"] = connections_t,
+    local tnn = nerv.TNN("TNN", global_conf, {["dim_in"] = {1, global_conf.vocab:size()},
+        ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
+        ["connections"] = connections_t, ["clip_t"] = global_conf.clip_t,
     })
 
     tnn:init(global_conf.batch_size, global_conf.chunk_size)
@@ -183,12 +184,12 @@ test_fn = data_dir .. '/ptb.test.txt.adds'
 vocab_fn = data_dir .. '/vocab'
 
 global_conf = {
-    lrate = 1, wcost = 1e-6, momentum = 0,
+    lrate = 0.001, wcost = 1e-6, momentum = 0, clip_t = 0.01,
     cumat_type = nerv.CuMatrixFloat,
     mmat_type = nerv.MMatrixFloat,
     nn_act_default = 0,
 
-    hidden_size = 400, --set to 400 for a stable good test PPL
+    hidden_size = 200, --set to 400 for a stable good test PPL
     chunk_size = 15,
     batch_size = 10,
     max_iter = 35,
@@ -200,9 +201,9 @@ global_conf = {
     test_fn = test_fn,
     vocab_fn = vocab_fn,
     sche_log_pre = "[SCHEDULER]:",
-    log_w_num = 40000, --give a message when log_w_num words have been processed
+    log_w_num = 400, --give a message when log_w_num words have been processed
     timer = nerv.Timer(),
-    work_dir_base = '/home/slhome/txh18/workspace/nerv/play/ptbEXP/tnn_test'
+    work_dir_base = '/home/slhome/txh18/workspace/nerv/play/ptbEXP/tnn_lstm_test'
 }
 
 elseif (set == "msr_sc") then
@@ -278,7 +279,7 @@ else
     printf("%s no user setting, all default...\n", global_conf.sche_log_pre)
 end
 
-global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost
+global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size --.. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost
 global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
 global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
 global_conf.param_fn = global_conf.work_dir .. "/params"
--
cgit v1.2.3
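
Note on the clip_t option added by this patch: the TNN implementation itself is not part of this diff, so the Lua sketch below only illustrates the usual semantics of such a temporal clipping threshold, clamping each back-propagated gradient entry into [-clip_t, clip_t] at every unrolled time step to curb exploding gradients over long chunks. The function name clip_gradient and the use of a plain Lua table in place of a NERV matrix are illustrative assumptions, not the NERV API.

-- Illustrative sketch only (not the NERV implementation): clamp one
-- timestep's gradient entries into [-clip_t, clip_t], the conventional
-- meaning of a clipping threshold in back-propagation through time.
local function clip_gradient(grad, clip_t)
    for i = 1, #grad do
        if grad[i] > clip_t then
            grad[i] = clip_t
        elseif grad[i] < -clip_t then
            grad[i] = -clip_t
        end
    end
    return grad
end

-- With clip_t = 0.01, as set in global_conf above:
local g = clip_gradient({0.5, -0.002, -3.0}, 0.01)
-- g is now {0.01, -0.002, -0.01}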