path: root/nerv/examples/lmptb/m-tests/tnn_test.lua
author     txh18 <cloudygooseg@gmail.com>  2015-11-08 19:49:15 +0800
committer  txh18 <cloudygooseg@gmail.com>  2015-11-08 19:49:15 +0800
commit     1499ef632a2b9d63d6f68da9f42401d4d141a9f6 (patch)
tree       cb3e3b3353f5d3add6de43b0b63ddb2cc44bf215 /nerv/examples/lmptb/m-tests/tnn_test.lua
parent     d9803ab152a0152f39ce2efa55004e98a9b6aa2d (diff)
switched to softmax_ce_t
Diffstat (limited to 'nerv/examples/lmptb/m-tests/tnn_test.lua')
-rw-r--r--  nerv/examples/lmptb/m-tests/tnn_test.lua  13
1 file changed, 11 insertions(+), 2 deletions(-)
diff --git a/nerv/examples/lmptb/m-tests/tnn_test.lua b/nerv/examples/lmptb/m-tests/tnn_test.lua
index a778dea..7a8519e 100644
--- a/nerv/examples/lmptb/m-tests/tnn_test.lua
+++ b/nerv/examples/lmptb/m-tests/tnn_test.lua
@@ -82,7 +82,7 @@ function prepare_layers(global_conf, paramRepo)
["outputL"] = {{["ltp"] = "ltp_ho", ["bp"] = "bp_o"}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}}},
},
- ["nerv.SoftmaxCELayer"] = {
+ ["nerv.SoftmaxCELayerT"] = {
["softmaxL"] = {{}, {["dim_in"] = {global_conf.vocab:size(), global_conf.vocab:size()}, ["dim_out"] = {1}}},
},
}
@@ -164,6 +164,15 @@ function lm_process_file(global_conf, fn, tnn, do_train)
r, feeds = tnn:getFeedFromReader(reader)
if (r == false) then break end
+
+ for t = 1, global_conf.chunk_size do
+ tnn.err_inputs_m[t][1]:fill(1)
+ for i = 1, global_conf.batch_size do
+ if (bit.bor(feeds.flags_now[t][i], nerv.TNN.FC.HAS_LABEL) == 0) then
+ tnn.err_inputs_m[t][1][i][0] = 0
+ end
+ end
+ end
--[[
for j = 1, global_conf.chunk_size, 1 do
@@ -242,7 +251,7 @@ global_conf = {
valid_fn = valid_fn,
test_fn = test_fn,
sche_log_pre = "[SCHEDULER]:",
- log_w_num = 10000, --give a message when log_w_num words have been processed
+ log_w_num = 40000, --give a message when log_w_num words have been processed
timer = nerv.Timer()
}
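For reference, below is a minimal Lua sketch of what the new per-frame masking loop is meant to do; it is an illustration, not part of the commit. The time-aware nerv.SoftmaxCELayerT consumes a per-frame error weight through tnn.err_inputs_m, and frames whose feed flags do not carry nerv.TNN.FC.HAS_LABEL should contribute no cross-entropy error. The sketch reuses the names from the diff (tnn, feeds, global_conf, nerv.TNN.FC.HAS_LABEL); the helper name mask_unlabeled_frames is hypothetical, and the flag test uses bit.band on the assumption that the intent is "HAS_LABEL not set", whereas the committed code uses bit.bor.

-- Illustrative sketch, not part of the commit.
-- `bit` is LuaJIT's bit-operations library; nerv and the tnn/feeds objects
-- are assumed to exist as in the surrounding test script.
local bit = require("bit")

local function mask_unlabeled_frames(tnn, feeds, global_conf)
    for t = 1, global_conf.chunk_size do
        -- default: every sequence in this frame gets full CE error weight
        tnn.err_inputs_m[t][1]:fill(1)
        for i = 1, global_conf.batch_size do
            -- assumption: bit.band checks whether HAS_LABEL is absent
            if bit.band(feeds.flags_now[t][i], nerv.TNN.FC.HAS_LABEL) == 0 then
                -- zero the weight so this unlabeled frame contributes no error
                tnn.err_inputs_m[t][1][i][0] = 0
            end
        end
    end
end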