author | txh18 <[email protected]> | 2015-11-15 22:44:02 +0800
committer | txh18 <[email protected]> | 2015-11-15 22:44:02 +0800
commit | ef40688d5a0a3b7eae18dc364a40ae4e8e7619e7 (patch)
tree | ec12c403ddc5a2c0d0928f3a8249e74b4c7d5916
parent | 5760914d95059777c5e475f3c42d1b32983235a3 (diff)
added timer
-rw-r--r-- | nerv/examples/lmptb/lm_trainer.lua | 4
-rw-r--r-- | nerv/examples/lmptb/rnn/tnn.lua | 8
-rw-r--r-- | nerv/examples/lmptb/tnn_ptb_main.lua | 3

3 files changed, 13 insertions, 2 deletions
diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua
index d34634c..226873b 100644
--- a/nerv/examples/lmptb/lm_trainer.lua
+++ b/nerv/examples/lmptb/lm_trainer.lua
@@ -63,6 +63,10 @@ function LMTrainer.lm_process_file(global_conf, fn, tnn, do_train)
             next_log_wcn = next_log_wcn + global_conf.log_w_num
             printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
             printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"))
+            for key, value in pairs(global_conf.timer.rec) do
+                printf("\t [global_conf.timer]: time spent on %s:%.5fs\n", key, value)
+            end
+            global_conf.timer:flush()
             nerv.LMUtil.wait(0.1)
         end
diff --git a/nerv/examples/lmptb/rnn/tnn.lua b/nerv/examples/lmptb/rnn/tnn.lua
index 9850fe5..d6bf42e 100644
--- a/nerv/examples/lmptb/rnn/tnn.lua
+++ b/nerv/examples/lmptb/rnn/tnn.lua
@@ -384,8 +384,10 @@ function TNN:propagate_dfs(ref, t)
             end
         end
     ]]--
+    self.gconf.timer:tic("tnn_actual_layer_propagate")
     ref.layer:propagate(ref.inputs_m[t], ref.outputs_m[t], t) --propagate!
-
+    self.gconf.timer:toc("tnn_actual_layer_propagate")
+
     if (bit.band(self.feeds_now.flagsPack_now[t], bit.bor(nerv.TNN.FC.SEQ_START, nerv.TNN.FC.SEQ_END)) > 0) then --restore cross-border history
         for i = 1, self.batch_size do
             local seq_start = bit.band(self.feeds_now.flags_now[t][i], nerv.TNN.FC.SEQ_START)
@@ -487,10 +489,14 @@ function TNN:backpropagate_dfs(ref, t, do_update)
     --ok, do back_propagate
     --print("debug ok, back-propagating(or updating)")
     if (do_update == false) then
+        self.gconf.timer:tic("tnn_actual_layer_backpropagate")
         ref.layer:back_propagate(ref.err_inputs_m[t], ref.err_outputs_m[t], ref.inputs_m[t], ref.outputs_m[t], t)
+        self.gconf.timer:toc("tnn_actual_layer_backpropagate")
     else
         --print(ref.err_inputs_m[t][1])
+        self.gconf.timer:tic("tnn_actual_layer_update")
         ref.layer:update(ref.err_inputs_m[t], ref.inputs_m[t], ref.outputs_m[t], t)
+        self.gconf.timer:toc("tnn_actual_layer_update")
     end

     if (do_update == false and bit.band(self.feeds_now.flagsPack_now[t], bit.bor(nerv.TNN.FC.SEQ_START, nerv.TNN.FC.SEQ_END)) > 0) then --flush cross-border errors
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua
index c875274..891487c 100644
--- a/nerv/examples/lmptb/tnn_ptb_main.lua
+++ b/nerv/examples/lmptb/tnn_ptb_main.lua
@@ -168,7 +168,7 @@ global_conf = {
     mmat_type = nerv.MMatrixFloat,
     nn_act_default = 0,
-    hidden_size = 400,
+    hidden_size = 300, --set to 400 for a stable good test PPL
     chunk_size = 15,
     batch_size = 10,
     max_iter = 35,
@@ -203,6 +203,7 @@ global_conf = {
     chunk_size = 15,
     batch_size = 10,
     max_iter = 30,
+    decay_iter = 10,
     param_random = function() return (math.random() / 5 - 0.1) end,
     train_fn = train_fn,
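The added instrumentation assumes a timer object in global_conf (reached as self.gconf inside TNN) exposing tic/toc/flush and a rec table of accumulated durations. The diff itself does not show that object's implementation; below is a minimal Lua sketch of an interface consistent with the calls above, using os.clock(). The Timer class here is hypothetical, not the actual nerv utility.

-- Hypothetical sketch of the timer interface the diff assumes:
-- tic(name) records a start time, toc(name) accumulates the elapsed
-- seconds into rec[name], and flush() clears the records after they
-- have been printed.
local Timer = {}
Timer.__index = Timer

function Timer.new()
    return setmetatable({rec = {}, started = {}}, Timer)
end

function Timer:tic(name)
    self.started[name] = os.clock()
end

function Timer:toc(name)
    if self.started[name] ~= nil then
        self.rec[name] = (self.rec[name] or 0) + (os.clock() - self.started[name])
        self.started[name] = nil
    end
end

function Timer:flush()
    self.rec = {}
end

With such an object installed as global_conf.timer, the logging loop added to lm_trainer.lua prints one accumulated total per label (propagate, backpropagate, update) and then resets the records, so each log interval reports only the time spent since the previous report.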