about summary refs log tree commit diff
path: root/nerv/examples/lmptb/m-tests/tnn_test.lua
diff options
context:
space:
mode:
author	txh18 <cloudygooseg@gmail.com>	2015-11-08 17:59:27 +0800
committer	txh18 <cloudygooseg@gmail.com>	2015-11-08 17:59:27 +0800
commit	a8d369d49933ffdd306f47db6b74e0d27deda5d0 (patch)
tree	8a2126153fb2363ffea94bfe8c0b9e88f11bd0ea /nerv/examples/lmptb/m-tests/tnn_test.lua
parent	abc36052969ab121c8a1cfa478fc14e9e8dc78a2 (diff)
trying to test softmax_ce_t
Diffstat (limited to 'nerv/examples/lmptb/m-tests/tnn_test.lua')
-rw-r--r--	nerv/examples/lmptb/m-tests/tnn_test.lua	8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/nerv/examples/lmptb/m-tests/tnn_test.lua b/nerv/examples/lmptb/m-tests/tnn_test.lua
index ddea54c..888ba0f 100644
--- a/nerv/examples/lmptb/m-tests/tnn_test.lua
+++ b/nerv/examples/lmptb/m-tests/tnn_test.lua
@@ -2,8 +2,8 @@ require 'lmptb.lmvocab'
require 'lmptb.lmfeeder'
require 'lmptb.lmutil'
require 'lmptb.layer.init'
+require 'rnn.init'
require 'lmptb.lmseqreader'
-require 'rnn.tnn'
--[[global function rename]]--
printf = nerv.printf
@@ -194,6 +194,7 @@ function lm_process_file(global_conf, fn, tnn, do_train)
next_log_wcn = next_log_wcn + global_conf.log_w_num
printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"))
+ nerv.LMUtil.wait(1)
end
--[[
@@ -259,17 +260,16 @@ global_conf = {
nn_act_default = 0,
hidden_size = 20,
- chunk_size = 5,
+ chunk_size = 2,
batch_size = 3,
max_iter = 3,
param_random = function() return (math.random() / 5 - 0.1) end,
- independent = true,
train_fn = train_fn,
valid_fn = valid_fn,
test_fn = test_fn,
sche_log_pre = "[SCHEDULER]:",
- log_w_num = 20, --give a message when log_w_num words have been processed
+ log_w_num = 10, --give a message when log_w_num words have been processed
timer = nerv.Timer()
}