about summary refs log tree commit diff
path: root/nerv/examples/lmptb/lm_trainer.lua
diff options
context:
space:
mode:
author: txh18 <cloudygooseg@gmail.com> 2015-11-16 15:15:05 +0800
committer: txh18 <cloudygooseg@gmail.com> 2015-11-16 15:15:05 +0800
commit: 33e050ebe811c08abd2b4edc697e9cd9acadfddb (patch)
tree: 1c32dde27d3c168dfa283b241d4988601d6b2c66 /nerv/examples/lmptb/lm_trainer.lua
parent: 267a486fb78a985cbfdc60ef8549b3128f716713 (diff)
used os.clock() for timer
Diffstat (limited to 'nerv/examples/lmptb/lm_trainer.lua')
-rw-r--r-- nerv/examples/lmptb/lm_trainer.lua 10
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua
index 2be97c8..7c11a34 100644
--- a/nerv/examples/lmptb/lm_trainer.lua
+++ b/nerv/examples/lmptb/lm_trainer.lua
@@ -22,6 +22,8 @@ function LMTrainer.lm_process_file(global_conf, fn, tnn, do_train)
local next_log_wcn = global_conf.log_w_num
while (1) do
+ global_conf.timer:tic('most_out_loop_lmprocessfile')
+
local r, feeds
r, feeds = tnn:getFeedFromReader(reader)
@@ -60,12 +62,17 @@ function LMTrainer.lm_process_file(global_conf, fn, tnn, do_train)
end
end
end
+
+ tnn:moveRightToNextMB()
+ global_conf.timer:tic('most_out_loop_lmprocessfile')
+
+ --print log
if (result["rnn"].cn_w > next_log_wcn) then
next_log_wcn = next_log_wcn + global_conf.log_w_num
printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"))
for key, value in pairs(global_conf.timer.rec) do
- printf("\t [global_conf.timer]: time spent on %s:%.5fs\n", key, value)
+ printf("\t [global_conf.timer]: time spent on %s:%.5f clock time\n", key, value)
end
global_conf.timer:flush()
nerv.LMUtil.wait(0.1)
@@ -77,7 +84,6 @@ function LMTrainer.lm_process_file(global_conf, fn, tnn, do_train)
end
]]--
- tnn:moveRightToNextMB()
collectgarbage("collect")