path: root/nerv/examples/lmptb/rnnlm_ptb_main.lua
Diffstat (limited to 'nerv/examples/lmptb/rnnlm_ptb_main.lua')
-rw-r--r--  nerv/examples/lmptb/rnnlm_ptb_main.lua  15
1 file changed, 15 insertions, 0 deletions
diff --git a/nerv/examples/lmptb/rnnlm_ptb_main.lua b/nerv/examples/lmptb/rnnlm_ptb_main.lua
index e2ca860..dc011fb 100644
--- a/nerv/examples/lmptb/rnnlm_ptb_main.lua
+++ b/nerv/examples/lmptb/rnnlm_ptb_main.lua
@@ -190,6 +190,7 @@ global_conf = {
valid_fn = valid_fn,
test_fn = test_fn,
vocab_fn = vocab_fn,
+ max_sen_len = 90,
sche_log_pre = "[SCHEDULER]:",
log_w_num = 40000, --give a message when log_w_num words have been processed
timer = nerv.Timer(),
@@ -398,3 +399,17 @@ if commands["test"] == 1 then
LMTrainer.lm_process_file_rnn(global_conf, global_conf.test_fn, tnn, false) --false update!
end --if commands["test"]
+if commands["wordprob"] == 1 then
+ if final_iter ~= -1 and test_iter == -1 then
+ test_iter = final_iter
+ end
+ if test_iter == -1 then
+ test_iter = "final"
+ end
+
+ printf("===FINAL TEST===\n")
+ global_conf.sche_log_pre = "[SCHEDULER FINAL_TEST]:"
+ tnn = load_net(global_conf, test_iter)
+ LMTrainer.lm_process_file_rnn(global_conf, global_conf.test_fn, tnn, false, {["word_prob_report"] = true}) --false update!
+end --if commands["test"]
+
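Note on usage (not part of the commit): the new block runs only when commands["wordprob"] == 1, so the command table must contain a "wordprob" entry. A minimal Lua sketch of how such a table could be built from a command-line argument, assuming commands are given as a comma-separated string like "train,test,wordprob" (the argument index and separator are hypothetical, only the commands[name] == 1 convention is taken from the diff above):

    -- hypothetical sketch: parse a comma-separated command list into the
    -- commands table that the diff above checks with commands["wordprob"] == 1
    local commands = {}
    for cmd in string.gmatch(arg[2] or "", "[^,]+") do
        commands[cmd] = 1
    end

With "wordprob" selected, the block loads the network for test_iter and reruns the test file through LMTrainer.lm_process_file_rnn with updates disabled and {["word_prob_report"] = true}, so per-word probabilities are reported instead of only the aggregate result.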