diff options
Diffstat (limited to 'nerv/examples/lmptb/m-tests/tnn_test.lua')
-rw-r--r-- | nerv/examples/lmptb/m-tests/tnn_test.lua | 8 |
1 file changed, 4 insertions, 4 deletions
diff --git a/nerv/examples/lmptb/m-tests/tnn_test.lua b/nerv/examples/lmptb/m-tests/tnn_test.lua index ddea54c..888ba0f 100644 --- a/nerv/examples/lmptb/m-tests/tnn_test.lua +++ b/nerv/examples/lmptb/m-tests/tnn_test.lua @@ -2,8 +2,8 @@ require 'lmptb.lmvocab' require 'lmptb.lmfeeder' require 'lmptb.lmutil' require 'lmptb.layer.init' +require 'rnn.init' require 'lmptb.lmseqreader' -require 'rnn.tnn' --[[global function rename]]-- printf = nerv.printf @@ -194,6 +194,7 @@ function lm_process_file(global_conf, fn, tnn, do_train) next_log_wcn = next_log_wcn + global_conf.log_w_num printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date()) printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn")) + nerv.LMUtil.wait(1) end --[[ @@ -259,17 +260,16 @@ global_conf = { nn_act_default = 0, hidden_size = 20, - chunk_size = 5, + chunk_size = 2, batch_size = 3, max_iter = 3, param_random = function() return (math.random() / 5 - 0.1) end, - independent = true, train_fn = train_fn, valid_fn = valid_fn, test_fn = test_fn, sche_log_pre = "[SCHEDULER]:", - log_w_num = 20, --give a message when log_w_num words have been processed + log_w_num = 10, --give a message when log_w_num words have been processed timer = nerv.Timer() } |