Diffstat (limited to 'nerv/examples/lmptb/lm_trainer.lua')
-rw-r--r-- | nerv/examples/lmptb/lm_trainer.lua | 10
1 file changed, 8 insertions, 2 deletions
diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua
index 2be97c8..7c11a34 100644
--- a/nerv/examples/lmptb/lm_trainer.lua
+++ b/nerv/examples/lmptb/lm_trainer.lua
@@ -22,6 +22,8 @@ function LMTrainer.lm_process_file(global_conf, fn, tnn, do_train)
     local next_log_wcn = global_conf.log_w_num
 
     while (1) do
+        global_conf.timer:tic('most_out_loop_lmprocessfile')
+
         local r, feeds
         r, feeds = tnn:getFeedFromReader(reader)
@@ -60,12 +62,17 @@ function LMTrainer.lm_process_file(global_conf, fn, tnn, do_train)
                 end
             end
         end
+
+        tnn:moveRightToNextMB()
+        global_conf.timer:tic('most_out_loop_lmprocessfile')
+
+        --print log
         if (result["rnn"].cn_w > next_log_wcn) then
             next_log_wcn = next_log_wcn + global_conf.log_w_num
             printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
             printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"))
             for key, value in pairs(global_conf.timer.rec) do
-                printf("\t [global_conf.timer]: time spent on %s:%.5fs\n", key, value)
+                printf("\t [global_conf.timer]: time spent on %s:%.5f clock time\n", key, value)
             end
             global_conf.timer:flush()
             nerv.LMUtil.wait(0.1)
@@ -77,7 +84,6 @@ function LMTrainer.lm_process_file(global_conf, fn, tnn, do_train)
     end
     ]]--
 
-    tnn:moveRightToNextMB()
 
     collectgarbage("collect")
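For context on the timing calls the patch adds: lm_process_file starts a named timer with global_conf.timer:tic(...) around the outer mini-batch loop, reads the accumulated times out of global_conf.timer.rec when it logs progress, and then clears them with global_conf.timer:flush(). The snippet below is only a minimal, self-contained sketch of that tic/toc accumulate-then-flush pattern in plain Lua; this Timer table is hypothetical and is not nerv's actual timer class, whose real implementation may differ.

-- Minimal sketch (assumption: not nerv's real timer class) of the pattern
-- used above: tic(name) stores a start time, toc(name) adds the elapsed
-- time into rec[name], and flush() clears the records after logging.
local Timer = {}
Timer.__index = Timer

function Timer.new()
    return setmetatable({rec = {}, start = {}}, Timer)
end

function Timer:tic(name)
    self.start[name] = os.clock()
end

function Timer:toc(name)
    if self.start[name] ~= nil then
        self.rec[name] = (self.rec[name] or 0) + (os.clock() - self.start[name])
        self.start[name] = nil
    end
end

function Timer:flush()
    self.rec = {}
end

-- Usage mirroring the loop above: time one pass, dump the record, reset.
local timer = Timer.new()
timer:tic('most_out_loop_lmprocessfile')
-- ... process one mini-batch ...
timer:toc('most_out_loop_lmprocessfile')
for key, value in pairs(timer.rec) do
    print(string.format("time spent on %s: %.5f clock time", key, value))
end
timer:flush()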