From 234a063bc3adb8fede6b65f660f00721ff53c6b7 Mon Sep 17 00:00:00 2001
From: txh18 <cloudygooseg@gmail.com>
Date: Fri, 25 Dec 2015 14:25:17 +0800
Subject: Speed up test output in lm_trainer

---
 nerv/examples/lmptb/lm_trainer.lua | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua
index 06c1a4c..ba8d508 100644
--- a/nerv/examples/lmptb/lm_trainer.lua
+++ b/nerv/examples/lmptb/lm_trainer.lua
@@ -31,9 +31,9 @@ function LMTrainer.lm_process_file_rnn(global_conf, fn, tnn, do_train, p_conf)
         if do_train == true then
             nerv.warning("LMTrainer.lm_process_file_rnn: warning, one_sen_report is true while do_train is also true, strange")
         end
-        nerv.printf("lm_process_file_rnn: one_sen report mode, set batch_size to 1 and chunk_size to max_sen_len(%d)\n", 
+        nerv.printf("lm_process_file_rnn: one_sen report mode, set chunk_size to max_sen_len(%d)\n", 
                 global_conf.max_sen_len)
-        batch_size = 1 
+        batch_size = global_conf.batch_size
         chunk_size = global_conf.max_sen_len
         r_conf["se_mode"] = true
     else
@@ -112,7 +112,9 @@ function LMTrainer.lm_process_file_rnn(global_conf, fn, tnn, do_train, p_conf)
         end
         if p_conf.one_sen_report == true then
             for i = 1, batch_size do
-                nerv.printf("LMTrainer.lm_process_file_rnn: one_sen_report_output, %f\n", sen_logp[i])    
+                if feeds.labels_s[1][i] ~= global_conf.vocab.null_token then
+                   nerv.printf("LMTrainer.lm_process_file_rnn: one_sen_report_output, %f\n", sen_logp[i])    
+                end
             end
         end
 
-- 
cgit v1.2.3-70-g09d2