diff options
author | txh18 <[email protected]> | 2015-12-10 17:20:31 +0800 |
---|---|---|
committer | txh18 <[email protected]> | 2015-12-10 17:20:31 +0800 |
commit | 5cf7e88df3aa4cf60819e955f0f537d2cfeccaac (patch) | |
tree | bbd18707232579e1683dc5f29e428d967221fa79 | |
parent | 91075c34160fa24e484148b26c1178e05c2212a4 (diff) |
removed flush_all for every mb in process_birnn
-rw-r--r-- | nerv/examples/lmptb/lm_trainer.lua | 3 |
1 file changed, 2 insertions, 1 deletion
diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua index ecedc9f..eab6e2d 100644 --- a/nerv/examples/lmptb/lm_trainer.lua +++ b/nerv/examples/lmptb/lm_trainer.lua @@ -246,6 +246,8 @@ function LMTrainer.lm_process_file_birnn(global_conf, fn, tnn, do_train, p_conf) --tnn:move_right_to_nextmb({0}) --do not need history for bi directional model global_conf.timer:toc('tnn_afterprocess') + --tnn:flush_all() --you need this for bilstmlm_ptb_v2, because it has connection across 2 time steps + global_conf.timer:toc('most_out_loop_lmprocessfile') --print log @@ -268,7 +270,6 @@ function LMTrainer.lm_process_file_birnn(global_conf, fn, tnn, do_train, p_conf) collectgarbage("collect") - tnn:flush_all() --break --debug end |