path: root/nerv/examples/lmptb/rnnlm_ptb_main.lua
author    txh18 <cloudygooseg@gmail.com>    2015-12-02 20:29:56 +0800
committer txh18 <cloudygooseg@gmail.com>    2015-12-02 20:29:56 +0800
commit    103a4291349c0f55155ca97bd236fc7784d286ff (patch)
tree      f9b4c7e021779ba803791148cec6dcea28053e76 /nerv/examples/lmptb/rnnlm_ptb_main.lua
parent    094fc872d3e62c5f0950ac1747f130e30a08bee8 (diff)
function name change in LMTrainer
Diffstat (limited to 'nerv/examples/lmptb/rnnlm_ptb_main.lua')
 nerv/examples/lmptb/rnnlm_ptb_main.lua | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/nerv/examples/lmptb/rnnlm_ptb_main.lua b/nerv/examples/lmptb/rnnlm_ptb_main.lua
index 16024a8..35b2e08 100644
--- a/nerv/examples/lmptb/rnnlm_ptb_main.lua
+++ b/nerv/examples/lmptb/rnnlm_ptb_main.lua
@@ -307,7 +307,7 @@ if start_iter == -1 or start_iter == 0 then
local tnn = load_net(global_conf, 0)
global_conf.paramRepo = tnn:get_params() --get auto-generted params
global_conf.paramRepo:export(global_conf.param_fn .. '.0', nil) --some parameters are auto-generated, saved again to param.0 file
- local result = LMTrainer.lm_process_file(global_conf, global_conf.valid_fn, tnn, false) --false update!
+ local result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.valid_fn, tnn, false) --false update!
nerv.LMUtil.wait(1)
ppl_rec[0] = {}
ppl_rec[0].valid = result:ppl_all("rnn")
@@ -327,7 +327,7 @@ for iter = start_iter, global_conf.max_iter, 1 do
global_conf.sche_log_pre = "[SCHEDULER ITER"..iter.." LR"..global_conf.lrate.."]:"
tnn = load_net(global_conf, iter - 1)
printf("===ITERATION %d LR %f===\n", iter, global_conf.lrate)
- result = LMTrainer.lm_process_file(global_conf, global_conf.train_fn_shuf, tnn, true) --true update!
+ result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.train_fn_shuf, tnn, true) --true update!
ppl_rec[iter] = {}
ppl_rec[iter].train = result:ppl_all("rnn")
--shuffling training file
@@ -335,10 +335,10 @@ for iter = start_iter, global_conf.max_iter, 1 do
os.execute('cp ' .. global_conf.train_fn_shuf .. ' ' .. global_conf.train_fn_shuf_bak)
os.execute('cat ' .. global_conf.train_fn_shuf_bak .. ' | sort -R --random-source=/dev/zero > ' .. global_conf.train_fn_shuf)
printf("===PEEK ON TEST %d===\n", iter)
- result = LMTrainer.lm_process_file(global_conf, global_conf.test_fn, tnn, false) --false update!
+ result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.test_fn, tnn, false) --false update!
ppl_rec[iter].test = result:ppl_all("rnn")
printf("===VALIDATION %d===\n", iter)
- result = LMTrainer.lm_process_file(global_conf, global_conf.valid_fn, tnn, false) --false update!
+ result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.valid_fn, tnn, false) --false update!
ppl_rec[iter].valid = result:ppl_all("rnn")
ppl_rec[iter].lr = global_conf.lrate
if ((ppl_last / ppl_rec[iter].valid < 1.0003 or lr_half == true) and iter > global_conf.decay_iter) then
@@ -368,5 +368,5 @@ printf("\n")
printf("===FINAL TEST===\n")
global_conf.sche_log_pre = "[SCHEDULER FINAL_TEST]:"
tnn = load_net(global_conf, final_iter)
-LMTrainer.lm_process_file(global_conf, global_conf.test_fn, tnn, false) --false update!
+LMTrainer.lm_process_file_rnn(global_conf, global_conf.test_fn, tnn, false) --false update!
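
For reference, the call pattern after this rename, as a minimal sketch inferred only from the call sites visible in this diff (the body of lm_process_file_rnn lives in LMTrainer and is not shown here). The trailing boolean selects between a training pass (true: parameters are updated) and an evaluation-only pass (false), and the returned object reports perplexity via ppl_all("rnn"):

-- Evaluation-only pass over the validation set: the trailing `false`
-- means no parameter update, matching the "--false update!" comments above.
local result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.valid_fn, tnn, false)
local valid_ppl = result:ppl_all("rnn")

-- Training pass over the shuffled training file: `true` enables updates.
result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.train_fn_shuf, tnn, true)
local train_ppl = result:ppl_all("rnn")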