about summary refs log tree commit diff
path: root/nerv/examples/lmptb/tnn_ptb_main.lua
diff options
context:
space:
mode:
Diffstat (limited to 'nerv/examples/lmptb/tnn_ptb_main.lua')
-rw-r--r-- nerv/examples/lmptb/tnn_ptb_main.lua | 8
1 file changed, 4 insertions, 4 deletions
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua
index 803ae68..5cc92c4 100644
--- a/nerv/examples/lmptb/tnn_ptb_main.lua
+++ b/nerv/examples/lmptb/tnn_ptb_main.lua
@@ -232,7 +232,7 @@ nerv.LMUtil.wait(3)
ppl_rec = {}
lr_rec = {}
ppl_rec[0] = {}
-ppl_rec[0].valid = result:ppl_net("rnn")
+ppl_rec[0].valid = result:ppl_all("rnn")
ppl_last = ppl_rec[0].valid
ppl_rec[0].train = 0
ppl_rec[0].test = 0
@@ -245,17 +245,17 @@ for iter = 1, global_conf.max_iter, 1 do
global_conf.sche_log_pre = "[SCHEDULER ITER"..iter.." LR"..global_conf.lrate.."]:"
result = LMTrainer.lm_process_file(global_conf, global_conf.train_fn_shuf, tnn, true) --true update!
ppl_rec[iter] = {}
- ppl_rec[iter].train = result:ppl_net("rnn")
+ ppl_rec[iter].train = result:ppl_all("rnn")
--shuffling training file
printf("%s shuffling training file\n", global_conf.sche_log_pre)
os.execute('cp ' .. global_conf.train_fn_shuf .. ' ' .. global_conf.train_fn_shuf_bak)
os.execute('cat ' .. global_conf.train_fn_shuf_bak .. ' | sort -R --random-source=/dev/zero > ' .. global_conf.train_fn_shuf)
printf("===PEEK ON TEST %d===\n", iter)
result = LMTrainer.lm_process_file(global_conf, global_conf.test_fn, tnn, false) --false update!
- ppl_rec[iter].test = result:ppl_net("rnn")
+ ppl_rec[iter].test = result:ppl_all("rnn")
printf("===VALIDATION %d===\n", iter)
result = LMTrainer.lm_process_file(global_conf, global_conf.valid_fn, tnn, false) --false update!
- ppl_rec[iter].valid = result:ppl_net("rnn")
+ ppl_rec[iter].valid = result:ppl_all("rnn")
lr_rec[iter] = global_conf.lrate
if (ppl_last / ppl_rec[iter].valid < 1.0003 or lr_half == true) then
global_conf.lrate = (global_conf.lrate * 0.6)