about | summary | refs | log | tree | commit | diff
path: root/nerv/examples/lmptb/m-tests/tnn_test.lua
diff options
context:
space:
mode:
Diffstat (limited to 'nerv/examples/lmptb/m-tests/tnn_test.lua')
-rw-r--r-- nerv/examples/lmptb/m-tests/tnn_test.lua | 12
1 file changed, 9 insertions(+), 3 deletions(-)
diff --git a/nerv/examples/lmptb/m-tests/tnn_test.lua b/nerv/examples/lmptb/m-tests/tnn_test.lua
index 40e332c..a2c38f0 100644
--- a/nerv/examples/lmptb/m-tests/tnn_test.lua
+++ b/nerv/examples/lmptb/m-tests/tnn_test.lua
@@ -155,6 +155,9 @@ function lm_process_file(global_conf, fn, tnn, do_train)
local next_log_wcn = global_conf.log_w_num
+ global_conf.fz = 0
+ global_conf.fz2 = 0
+
while (1) do
local r, feeds
@@ -198,7 +201,7 @@ function lm_process_file(global_conf, fn, tnn, do_train)
next_log_wcn = next_log_wcn + global_conf.log_w_num
printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"))
- nerv.LMUtil.wait(1)
+ nerv.LMUtil.wait(0.1)
end
--[[
@@ -213,6 +216,9 @@ function lm_process_file(global_conf, fn, tnn, do_train)
--break --debug
end
+
+ print("gconf.fz", global_conf.fz)
+ print("gconf.fz2", global_conf.fz2)
printf("%s Displaying result:\n", global_conf.sche_log_pre)
printf("%s %s\n", global_conf.sche_log_pre, result:status("rnn"))
@@ -232,14 +238,14 @@ valid_fn = data_dir .. '/ptb.valid.txt.adds'
test_fn = data_dir .. '/ptb.test.txt.adds'
global_conf = {
- lrate = 1, wcost = 1e-6, momentum = 0,
+ lrate = 0.1, wcost = 1e-6, momentum = 0,
cumat_type = nerv.CuMatrixFloat,
mmat_type = nerv.MMatrixFloat,
nn_act_default = 0,
hidden_size = 200,
chunk_size = 15,
- batch_size = 10,
+ batch_size = 1,
max_iter = 25,
param_random = function() return (math.random() / 5 - 0.1) end,