diff options
Diffstat (limited to 'nerv/examples/lmptb/lstmlm_ptb_main.lua')
-rw-r--r-- | nerv/examples/lmptb/lstmlm_ptb_main.lua | 10 |
1 file changed, 5 insertions, 5 deletions
diff --git a/nerv/examples/lmptb/lstmlm_ptb_main.lua b/nerv/examples/lmptb/lstmlm_ptb_main.lua index 2751ec8..804438d 100644 --- a/nerv/examples/lmptb/lstmlm_ptb_main.lua +++ b/nerv/examples/lmptb/lstmlm_ptb_main.lua @@ -337,9 +337,8 @@ ppl_last = 100000 commands_str = "train:test" commands = {} test_iter = -1 - --for testout(question) -local q_file = "ptb.test.txt.q10rs1_Msss.adds" +q_file = "ptb.test.txt.q10rs1_Msss.adds" if arg[2] ~= nil then nerv.printf("%s applying arg[2](%s)...\n", global_conf.sche_log_pre, arg[2]) @@ -485,8 +484,8 @@ end --if commands["test"] if commands["testout"] == 1 then nerv.printf("===TEST OUT===\n") nerv.printf("q_file:\t%s\n", q_file) - local q_fn = qdata_dir .. q_file - global_conf.sche_log_pre = "[SCHEDULER FINAL_TEST]:" + local q_fn = qdata_dir .. '/' .. q_file + global_conf.sche_log_pre = "[SCHEDULER TESTOUT]:" if final_iter ~= -1 and test_iter == -1 then test_iter = final_iter end @@ -495,7 +494,8 @@ if commands["testout"] == 1 then end tnn = load_net(global_conf, test_iter) global_conf.dropout_rate = 0 - LMTrainer.lm_process_file_rnn(global_conf, q_fn, tnn, false) --false update! + LMTrainer.lm_process_file_rnn(global_conf, q_fn, tnn, false, + {["one_sen_report"] = true}) --false update! end --if commands["testout"] |