Diffstat (limited to 'nerv/examples/lmptb/lstmlm_ptb_main.lua')
-rw-r--r--  nerv/examples/lmptb/lstmlm_ptb_main.lua  127
1 file changed, 94 insertions(+), 33 deletions(-)
diff --git a/nerv/examples/lmptb/lstmlm_ptb_main.lua b/nerv/examples/lmptb/lstmlm_ptb_main.lua
index 681c308..6e3fab9 100644
--- a/nerv/examples/lmptb/lstmlm_ptb_main.lua
+++ b/nerv/examples/lmptb/lstmlm_ptb_main.lua
@@ -77,10 +77,6 @@ function prepare_layers(global_conf)
--local recurrentLconfig = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du, ["pr"] = pr}}
local layers = {
- --["nerv.AffineRecurrentLayer"] = {
- -- ["recurrentL1"] = recurrentLconfig,
- --},
-
["nerv.LSTMLayerT"] = {
["lstmL1"] = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size, global_conf.hidden_size}, ["pr"] = pr}},
},
@@ -93,10 +89,6 @@ function prepare_layers(global_conf)
["selectL1"] = {{}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab, ["pr"] = pr}},
},
- --["nerv.SigmoidLayer"] = {
- -- ["sigmoidL1"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}}
- --},
-
["nerv.CombinerLayer"] = {
["combinerL1"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size, global_conf.hidden_size}, ["lambda"] = {1}}},
},
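Reading the lstmL1 declaration above: the three input ports and two output ports (all of width hidden_size) presumably carry the step input, previous hidden state, and previous cell state in, and the new hidden and cell states out. The port labels in the sketch below are an assumption, not taken from the nerv sources:

    -- Standalone sketch of the port widths implied by the config above;
    -- the {x_t, h_prev, c_prev} / {h_t, c_t} labels are assumptions.
    local hidden_size = 300
    local dim_in  = {hidden_size, hidden_size, hidden_size} -- {x_t, h_prev, c_prev}
    local dim_out = {hidden_size, hidden_size}              -- {h_t, c_t}
    assert(#dim_in == 3 and #dim_out == 2)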
@@ -195,23 +187,26 @@ local set = arg[1] --"test"
if (set == "ptb") then
-data_dir = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/PTBdata'
+root_dir = '/home/slhome/txh18/workspace'
+data_dir = root_dir .. '/ptb/DATA'
train_fn = data_dir .. '/ptb.train.txt.adds'
valid_fn = data_dir .. '/ptb.valid.txt.adds'
test_fn = data_dir .. '/ptb.test.txt.adds'
vocab_fn = data_dir .. '/vocab'
+qdata_dir = root_dir .. '/ptb/questionGen/gen'
+
global_conf = {
- lrate = 0.15, wcost = 1e-5, momentum = 0, clip_t = 2,
+ lrate = 0.15, wcost = 1e-5, momentum = 0, clip_t = 5,
cumat_type = nerv.CuMatrixFloat,
mmat_type = nerv.MMatrixFloat,
nn_act_default = 0,
- hidden_size = 650,
- layer_num = 2,
+ hidden_size = 300,
+ layer_num = 1,
chunk_size = 15,
batch_size = 20,
- max_iter = 45,
+ max_iter = 35,
lr_decay = 1.003,
decay_iter = 10,
param_random = function() return (math.random() / 5 - 0.1) end,
@@ -221,10 +216,11 @@ global_conf = {
valid_fn = valid_fn,
test_fn = test_fn,
vocab_fn = vocab_fn,
+ max_sen_len = 90,
sche_log_pre = "[SCHEDULER]:",
log_w_num = 40000, --give a message when log_w_num words have been processed
timer = nerv.Timer(),
- work_dir_base = '/home/slhome/txh18/workspace/nerv/play/ptbEXP/tnn_lstm_test'
+ work_dir_base = '/home/slhome/txh18/workspace/ptb/EXP-nerv/lstmlm_v1.0'
}
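A note on the initializer in the global_conf above: math.random() returns a value in [0, 1), so param_random draws each initial weight uniformly from [-0.1, 0.1). A standalone check:

    -- range check for the param_random initializer used above
    local param_random = function() return (math.random() / 5 - 0.1) end
    for _ = 1, 1000 do
        local v = param_random()
        assert(v >= -0.1 and v < 0.1)
    end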
elseif (set == "msr_sc") then
@@ -261,12 +257,50 @@ global_conf = {
work_dir_base = '/home/slhome/txh18/workspace/sentenceCompletion/EXP-Nerv/rnnlm_test'
}
+elseif (set == "twitter") then
+
+root_dir = '/home/slhome/txh18/workspace'
+data_dir = root_dir .. '/twitter_new/DATA'
+train_fn = data_dir .. '/twitter.choose.adds'
+valid_fn = data_dir .. '/twitter.valid.adds'
+test_fn = data_dir .. '/comm.test.choose-ppl.adds'
+vocab_fn = data_dir .. '/twitter.choose.train.vocab'
+
+--qdata_dir = root_dir .. '/ptb/questionGen/gen'
+
+global_conf = {
+ lrate = 0.15, wcost = 1e-5, momentum = 0, clip_t = 5,
+ cumat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
+ nn_act_default = 0,
+
+ hidden_size = 300,
+ layer_num = 1,
+ chunk_size = 15,
+ batch_size = 20,
+ max_iter = 35,
+ lr_decay = 1.003,
+ decay_iter = 10,
+ param_random = function() return (math.random() / 5 - 0.1) end,
+ dropout_str = "0",
+
+ train_fn = train_fn,
+ valid_fn = valid_fn,
+ test_fn = test_fn,
+ vocab_fn = vocab_fn,
+ max_sen_len = 90,
+ sche_log_pre = "[SCHEDULER]:",
+ log_w_num = 40000, --give a message when log_w_num words have been processed
+ timer = nerv.Timer(),
+ work_dir_base = root_dir .. '/twitter_new/EXP-nerv/lstmlm_v1.0'
+}
+
else
-valid_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
-train_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
-test_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
-vocab_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
+valid_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
+train_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
+test_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
+vocab_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
global_conf = {
lrate = 0.01, wcost = 1e-5, momentum = 0,
@@ -285,6 +319,7 @@ global_conf = {
train_fn = train_fn,
valid_fn = valid_fn,
test_fn = test_fn,
+ max_sen_len = 80,
lr_decay = 1.003,
decay_iter = 10,
vocab_fn = vocab_fn,
@@ -296,12 +331,15 @@ global_conf = {
end
-local lr_half = false --cannot be local; it is set via loadstring
-local start_iter = -1
-local ppl_last = 100000
-local commands_str = "train:test"
-local commands = {}
-local test_iter = -1
+lr_half = false --cannot be local; it is set via loadstring
+start_iter = -1
+start_lr = global_conf.lrate
+ppl_last = 100000
+commands_str = "train:test"
+commands = {}
+test_iter = -1
+--for the testout command (question set)
+q_file = "/home/slhome/txh18/workspace/ptb/questionGen/gen/ptb.test.txt.q10rs1_Msss.adds"
if arg[2] ~= nil then
nerv.printf("%s applying arg[2](%s)...\n", global_conf.sche_log_pre, arg[2])
@@ -311,21 +349,25 @@ else
nerv.printf("%s no user setting, all default...\n", global_conf.sche_log_pre)
end
-global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'l' .. global_conf.layer_num --.. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost
+global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'l' .. global_conf.layer_num .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost .. 'dr' .. global_conf.dropout_str
global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
global_conf.param_fn = global_conf.work_dir .. "/params"
global_conf.dropout_list = nerv.SUtil.parse_schedule(global_conf.dropout_str)
-global_conf.log_fn = global_conf.work_dir .. '/lstm_tnn_' .. commands_str .. '_log'
+global_conf.log_fn = global_conf.work_dir .. '/log_lstm_tnn_' .. commands_str ..os.date("_TT%m_%d_%X",os.time())
+global_conf.log_fn, _ = string.gsub(global_conf.log_fn, ':', '-')
commands = nerv.SUtil.parse_commands_set(commands_str)
-nerv.printf("%s creating work_dir...\n", global_conf.sche_log_pre)
-nerv.LMUtil.wait(1)
+global_conf.lrate = start_lr
+
+nerv.printf("%s creating work_dir(%s)...\n", global_conf.sche_log_pre, global_conf.work_dir)
+nerv.LMUtil.wait(2)
os.execute("mkdir -p "..global_conf.work_dir)
os.execute("cp " .. global_conf.train_fn .. " " .. global_conf.train_fn_shuf)
--redirecting log outputs!
nerv.SUtil.log_redirect(global_conf.log_fn)
+nerv.LMUtil.wait(2)
----------------printing options---------------------------------
nerv.printf("%s printing global_conf...\n", global_conf.sche_log_pre)
@@ -335,11 +377,11 @@ end
nerv.LMUtil.wait(2)
nerv.printf("%s printing training scheduling options...\n", global_conf.sche_log_pre)
-nerv.printf("lr_half:%s\n", tostring(lr_half))
-nerv.printf("start_iter:%s\n", tostring(start_iter))
-nerv.printf("ppl_last:%s\n", tostring(ppl_last))
-nerv.printf("commds_str:%s\n", commands_str)
-nerv.printf("test_iter:%s\n", tostring(test_iter))
+nerv.printf("lr_half:\t%s\n", tostring(lr_half))
+nerv.printf("start_iter:\t%s\n", tostring(start_iter))
+nerv.printf("ppl_last:\t%s\n", tostring(ppl_last))
+nerv.printf("commands_str:\t%s\n", commands_str)
+nerv.printf("test_iter:\t%s\n", tostring(test_iter))
nerv.printf("%s printing training scheduling end.\n", global_conf.sche_log_pre)
nerv.LMUtil.wait(2)
------------------printing options end------------------------------
@@ -441,3 +483,22 @@ if commands["test"] == 1 then
global_conf.dropout_rate = 0
LMTrainer.lm_process_file_rnn(global_conf, global_conf.test_fn, tnn, false) --false update!
end --if commands["test"]
+
+if commands["testout"] == 1 then
+ nerv.printf("===TEST OUT===\n")
+ nerv.printf("q_file:\t%s\n", q_file)
+ local q_fn = q_file --qdata_dir .. '/' .. q_file
+ global_conf.sche_log_pre = "[SCHEDULER TESTOUT]:"
+ if final_iter ~= -1 and test_iter == -1 then
+ test_iter = final_iter
+ end
+ if test_iter == -1 then
+ test_iter = "final"
+ end
+ tnn = load_net(global_conf, test_iter)
+ global_conf.dropout_rate = 0
+ LMTrainer.lm_process_file_rnn(global_conf, q_fn, tnn, false,
+ {["one_sen_report"] = true}) --false update!
+end --if commands["testout"]
+
+
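Usage note for the testout branch above: it is reached by overriding commands_str through arg[2], e.g. arg[1] = "ptb" and arg[2] = 'commands_str = "testout" test_iter = 35' (invented example values). test_iter selects which saved parameter set to load, falling back to final_iter after a training run and to the "final" parameters otherwise; the one_sen_report flag suggests lm_process_file_rnn then reports perplexity per sentence of q_file rather than a single corpus-level figure.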