-rw-r--r--  nerv/examples/lmptb/tnn_ptb_main.lua  37
1 file changed, 34 insertions(+), 3 deletions(-)
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua
index e9631ba..c875274 100644
--- a/nerv/examples/lmptb/tnn_ptb_main.lua
+++ b/nerv/examples/lmptb/tnn_ptb_main.lua
@@ -181,7 +181,38 @@ global_conf = {
vocab_fn = vocab_fn,
sche_log_pre = "[SCHEDULER]:",
log_w_num = 40000, --give a message when log_w_num words have been processed
- timer = nerv.Timer()
+ timer = nerv.Timer(),
+ work_dir = '/home/slhome/txh18/workspace/nerv/play/dagL_test'
+}
+
+elseif (set == "msr_sc") then
+
+data_dir = '/home/slhome/txh18/workspace/sentenceCompletion/DATA_PV2'
+train_fn = data_dir .. '/normed_all.sf.len60.adds.train'
+valid_fn = data_dir .. '/normed_all.sf.len60.adds.dev'
+test_fn = data_dir .. '/answer_normed.adds'
+vocab_fn = data_dir .. '/normed_all.choose.vocab30000.addqvocab'
+
+global_conf = {
+ lrate = 1, wcost = 1e-6, momentum = 0,
+ cumat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
+ nn_act_default = 0,
+
+ hidden_size = 300,
+ chunk_size = 15,
+ batch_size = 10,
+ max_iter = 30,
+ param_random = function() return (math.random() / 5 - 0.1) end,
+
+ train_fn = train_fn,
+ valid_fn = valid_fn,
+ test_fn = test_fn,
+ vocab_fn = vocab_fn,
+ sche_log_pre = "[SCHEDULER]:",
+ log_w_num = 40000, --give a message when log_w_num words have been processed
+ timer = nerv.Timer(),
+ work_dir = '/home/slhome/txh18/workspace/sentenceCompletion/EXP-Nerv/rnnlm_test'
}
else
@@ -209,12 +240,12 @@ global_conf = {
vocab_fn = vocab_fn,
sche_log_pre = "[SCHEDULER]:",
log_w_num = 10, --give a message when log_w_num words have been processed
- timer = nerv.Timer()
+ timer = nerv.Timer(),
+ work_dir = '/home/slhome/txh18/workspace/nerv/play/dagL_test'
}
end
-global_conf.work_dir = '/home/slhome/txh18/workspace/nerv/play/dagL_test'
global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
global_conf.param_fn = global_conf.work_dir .. "/params"
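
For context, the change above moves work_dir into each per-dataset global_conf branch instead of assigning it once after the if/else chain, so every dataset can use its own working directory. The following is a minimal sketch of the resulting selection pattern; the "ptb" and fallback branches are abridged, and the surrounding structure of tnn_ptb_main.lua (a top-level `set` variable choosing the dataset) is assumed rather than shown in this diff.

    -- sketch: per-dataset global_conf with its own work_dir, assuming `set`
    -- is defined earlier in tnn_ptb_main.lua
    if (set == "ptb") then
        global_conf = {
            -- ... PTB settings elided ...
            timer = nerv.Timer(),
            work_dir = '/home/slhome/txh18/workspace/nerv/play/dagL_test'
        }
    elseif (set == "msr_sc") then
        global_conf = {
            -- ... MSR sentence-completion settings elided ...
            timer = nerv.Timer(),
            work_dir = '/home/slhome/txh18/workspace/sentenceCompletion/EXP-Nerv/rnnlm_test'
        }
    else
        global_conf = {
            -- ... small test settings elided ...
            timer = nerv.Timer(),
            work_dir = '/home/slhome/txh18/workspace/nerv/play/dagL_test'
        }
    end

    -- paths derived from whichever work_dir was chosen, as in the
    -- unchanged lines following the second hunk
    global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
    global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
    global_conf.param_fn = global_conf.work_dir .. "/params"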