about summary refs log tree commit diff
path: root/nerv/examples/lmptb/main.lua
diff options
context:
space:
mode:
Diffstat (limited to 'nerv/examples/lmptb/main.lua')
-rw-r--r-- nerv/examples/lmptb/main.lua | 6
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/nerv/examples/lmptb/main.lua b/nerv/examples/lmptb/main.lua
index d505456..1939eda 100644
--- a/nerv/examples/lmptb/main.lua
+++ b/nerv/examples/lmptb/main.lua
@@ -224,9 +224,11 @@ function propagateFile(global_conf, dagL, fn, config)
if (result["rnn"].cn_w % global_conf.log_w_num == 0) then
printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"));
+ --[[
for key, value in pairs(global_conf.timer.rec) do
printf("\t [global_conf.timer]: time spent on %s:%.5fs\n", key, value)
end
+ ]]--
--comment this for debughtx
global_conf.timer:flush()
--nerv.CuMatrix.print_profile()
@@ -272,7 +274,7 @@ if (set == "ptb") then
hidden_size = 200,
batch_size = 10,
- bptt = 3, --train bptt_block's words. could be set to zero
+ bptt = 6, --train bptt_block's words. could be set to zero
max_iter = 18,
param_random = function() return (math.random() / 5 - 0.1) end,
independent = true,
@@ -281,7 +283,7 @@ if (set == "ptb") then
valid_fn = valid_fn,
test_fn = test_fn,
sche_log_pre = "[SCHEDULER]:",
- log_w_num = 500000, --give a message when log_w_num words have been processed
+ log_w_num = 100000, --give a message when log_w_num words have been processed
timer = nerv.Timer()
}
global_conf.work_dir = work_dir_base.."/h"..global_conf.hidden_size.."bp"..global_conf.bptt.."slr"..global_conf.lrate --..os.date("_%bD%dH%H") --comment this for testing