author    txh18 <[email protected]>    2015-12-25 14:25:17 +0800
committer txh18 <[email protected]>    2015-12-25 14:25:17 +0800
commit    234a063bc3adb8fede6b65f660f00721ff53c6b7 (patch)
tree      ee33a87121148276a7e8aa1d74b9a53d34a35698
parent    19c44a82abc8633a3f6e5560fd3089338b40c436 (diff)
speedup testout in lm_trainer
-rw-r--r--    nerv/examples/lmptb/lm_trainer.lua    8
1 file changed, 5 insertions, 3 deletions
diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua
index 06c1a4c..ba8d508 100644
--- a/nerv/examples/lmptb/lm_trainer.lua
+++ b/nerv/examples/lmptb/lm_trainer.lua
@@ -31,9 +31,9 @@ function LMTrainer.lm_process_file_rnn(global_conf, fn, tnn, do_train, p_conf)
         if do_train == true then
             nerv.warning("LMTrainer.lm_process_file_rnn: warning, one_sen_report is true while do_train is also true, strange")
         end
-        nerv.printf("lm_process_file_rnn: one_sen report mode, set batch_size to 1 and chunk_size to max_sen_len(%d)\n",
+        nerv.printf("lm_process_file_rnn: one_sen report mode, set chunk_size to max_sen_len(%d)\n",
             global_conf.max_sen_len)
-        batch_size = 1
+        batch_size = global_conf.batch_size
         chunk_size = global_conf.max_sen_len
         r_conf["se_mode"] = true
     else
@@ -112,7 +112,9 @@ function LMTrainer.lm_process_file_rnn(global_conf, fn, tnn, do_train, p_conf)
         end
         if p_conf.one_sen_report == true then
             for i = 1, batch_size do
-                nerv.printf("LMTrainer.lm_process_file_rnn: one_sen_report_output, %f\n", sen_logp[i])
+                if feeds.labels_s[1][i] ~= global_conf.vocab.null_token then
+                    nerv.printf("LMTrainer.lm_process_file_rnn: one_sen_report_output, %f\n", sen_logp[i])
+                end
             end
         end
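For readability, here is a sketch of how the affected parts of LMTrainer.lm_process_file_rnn read after this patch. It is reconstructed from the hunk context above; the enclosing one_sen_report branch and the exact indentation are assumptions, not verbatim repository code.

-- Sketch (assumed context: inside LMTrainer.lm_process_file_rnn).
-- one_sen_report mode no longer forces batch_size to 1, so test output runs with the
-- configured batch size; only chunk_size is pinned to the maximum sentence length.
if p_conf.one_sen_report == true then
    if do_train == true then
        nerv.warning("LMTrainer.lm_process_file_rnn: warning, one_sen_report is true while do_train is also true, strange")
    end
    nerv.printf("lm_process_file_rnn: one_sen report mode, set chunk_size to max_sen_len(%d)\n",
        global_conf.max_sen_len)
    batch_size = global_conf.batch_size   -- previously: batch_size = 1
    chunk_size = global_conf.max_sen_len
    r_conf["se_mode"] = true
end

-- When the per-sentence log-probabilities are reported, padding slots in the batch
-- (those labelled with the vocabulary's null token) are skipped, so only real
-- sentences are printed even though the batch is wider than one sentence.
if p_conf.one_sen_report == true then
    for i = 1, batch_size do
        if feeds.labels_s[1][i] ~= global_conf.vocab.null_token then
            nerv.printf("LMTrainer.lm_process_file_rnn: one_sen_report_output, %f\n", sen_logp[i])
        end
    end
end

Running the whole configured batch through the network at once, instead of one sentence at a time, is what gives the speedup mentioned in the commit subject; the null-token check keeps the per-sentence report output unchanged.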