author     txh18 <[email protected]>    2016-01-18 20:43:17 +0800
committer  txh18 <[email protected]>    2016-01-18 20:43:17 +0800
commit     acc2b72b08aa7fe16b85fb47b0308e3da5adae66 (patch)
tree       7a4bf506701351ef7ba6a6de3ef1553ebfc5e31e
parent     398e76ed28d7bbdf8cdf39a6d8f0fb7d8656febe (diff)
developing sample_grulm
-rw-r--r--  nerv/examples/lmptb/sample_grulm_ptb_main.lua  431
1 files changed, 431 insertions, 0 deletions
diff --git a/nerv/examples/lmptb/sample_grulm_ptb_main.lua b/nerv/examples/lmptb/sample_grulm_ptb_main.lua
new file mode 100644
index 0000000..86209da
--- /dev/null
+++ b/nerv/examples/lmptb/sample_grulm_ptb_main.lua
@@ -0,0 +1,431 @@
+require 'lmptb.lmvocab'
+require 'lmptb.lmfeeder'
+require 'lmptb.lmutil'
+require 'lmptb.layer.init'
+--require 'tnn.init'
+require 'lmptb.lmseqreader'
+require 'lm_trainer'
+
+--[[global function rename]]--
+--local printf = nerv.printf
+local LMTrainer = nerv.LMTrainer
+--[[global function rename ends]]--
+
+function prepare_parameters(global_conf, fn)
+ nerv.printf("%s preparing parameters...\n", global_conf.sche_log_pre)
+
+ global_conf.paramRepo = nerv.ParamRepo()
+ local paramRepo = global_conf.paramRepo
+
+ nerv.printf("%s loading parameter from file %s...\n", global_conf.sche_log_pre, fn)
+ paramRepo:import({fn}, nil, global_conf)
+
+ nerv.printf("%s preparing parameters end.\n", global_conf.sche_log_pre)
+
+ return nil
+end
+
+--global_conf: table
+--Returns: nerv.LayerRepo
+function prepare_layers(global_conf)
+ nerv.printf("%s preparing layers...\n", global_conf.sche_log_pre)
+
+ local pr = global_conf.paramRepo
+
+ local du = false
+
+ --local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}}
+ --local recurrentLconfig = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du, ["pr"] = pr}}
+
+ local layers = {
+ ["nerv.GRULayerT"] = {
+ ["gruL1"] = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["pr"] = pr}},
+ },
+
+ ["nerv.DropoutLayerT"] = {
+ ["dropoutL1"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}},
+ },
+
+ ["nerv.SelectLinearLayer"] = {
+ ["selectL1"] = {{}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab, ["pr"] = pr}},
+ },
+
+ ["nerv.CombinerLayer"] = {
+ ["combinerL1"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size, global_conf.hidden_size}, ["lambda"] = {1}}},
+ },
+
+ ["nerv.AffineLayer"] = {
+ ["outputL"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du, ["pr"] = pr}},
+ },
+
+ ["nerv.SoftmaxCELayerT"] = {
+ ["softmaxL"] = {{}, {["dim_in"] = {global_conf.vocab:size(), global_conf.vocab:size()}, ["dim_out"] = {1}}},
+ },
+ }
+
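+    --stack additional GRU + dropout + combiner layers when layer_num > 1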
+ for l = 2, global_conf.layer_num do
+ layers["nerv.DropoutLayerT"]["dropoutL" .. l] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}}
+ layers["nerv.GRULayerT"]["gruL" .. l] = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["pr"] = pr}}
+ layers["nerv.CombinerLayer"]["combinerL" .. l] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size, global_conf.hidden_size}, ["lambda"] = {1}}}
+ end
+ --[[ --we do not need those in the new tnn framework
+ printf("%s adding %d bptt layers...\n", global_conf.sche_log_pre, global_conf.bptt)
+ for i = 1, global_conf.bptt do
+ layers["nerv.IndRecurrentLayer"]["recurrentL" .. (i + 1)] = recurrentLconfig
+ layers["nerv.SigmoidLayer"]["sigmoidL" .. (i + 1)] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}}
+ layers["nerv.SelectLinearLayer"]["selectL" .. (i + 1)] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}}
+ end
+ --]]
+
+ local layerRepo = nerv.LayerRepo(layers, pr, global_conf)
+ nerv.printf("%s preparing layers end.\n", global_conf.sche_log_pre)
+ return layerRepo
+end
+
+--global_conf: table
+--layerRepo: nerv.LayerRepo
+--Returns: a nerv.TNN
+function prepare_tnn(global_conf, layerRepo)
+    nerv.printf("%s generating and initializing TNN...\n", global_conf.sche_log_pre)
+
+    --per-frame input: <input>[1] is the current word id, <input>[2] is the reference label fed to the CE softmax
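+    --each connection is {source_port, destination_port, time_shift}; a shift of 1 feeds the value produced at time t into time t + 1, forming the recurrent edge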
+ local connections_t = {
+ {"<input>[1]", "selectL1[1]", 0},
+
+ --{"selectL1[1]", "recurrentL1[1]", 0},
+ --{"recurrentL1[1]", "sigmoidL1[1]", 0},
+ --{"sigmoidL1[1]", "combinerL1[1]", 0},
+ --{"combinerL1[1]", "recurrentL1[2]", 1},
+
+ {"selectL1[1]", "gruL1[1]", 0},
+ {"gruL1[1]", "combinerL1[1]", 0},
+ {"combinerL1[1]", "gruL1[2]", 1},
+ {"combinerL1[2]", "dropoutL1[1]", 0},
+
+ {"dropoutL"..global_conf.layer_num.."[1]", "outputL[1]", 0},
+ {"outputL[1]", "softmaxL[1]", 0},
+ {"<input>[2]", "softmaxL[2]", 0},
+ {"softmaxL[1]", "<output>[1]", 0}
+ }
+
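+    --for deeper models, feed layer l-1's dropout output into layer l's GRU and add layer l's own recurrent edge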
+ for l = 2, global_conf.layer_num do
+ table.insert(connections_t, {"dropoutL"..(l-1).."[1]", "gruL"..l.."[1]", 0})
+ table.insert(connections_t, {"gruL"..l.."[1]", "combinerL"..l.."[1]", 0})
+ table.insert(connections_t, {"combinerL"..l.."[1]", "gruL"..l.."[2]", 1})
+ table.insert(connections_t, {"combinerL"..l.."[2]", "dropoutL"..l.."[1]", 0})
+ end
+
+ --[[
+ printf("%s printing DAG connections:\n", global_conf.sche_log_pre)
+ for key, value in pairs(connections_t) do
+ printf("\t%s->%s\n", key, value)
+ end
+ ]]--
+
+ local tnn = nerv.TNN("TNN", global_conf, {["dim_in"] = {1, global_conf.vocab:size()},
+ ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
+ ["connections"] = connections_t, ["clip_t"] = global_conf.clip_t,
+ })
+
+ tnn:init(global_conf.batch_size, global_conf.chunk_size)
+
+    nerv.printf("%s initializing TNN end.\n", global_conf.sche_log_pre)
+ return tnn
+end
+
+function prepare_dagL(global_conf, layerRepo)
+    nerv.printf("%s generating and initializing DAG layer...\n", global_conf.sche_log_pre)
+
+    --input: <input>[1] is the current word id, <input>[2] is the previous hidden activation
+    --output: <output>[1] is the affine score over the vocabulary, <output>[2] is the new hidden activation
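+    --unlike the TNN above, this DAG runs a single time step and has no time-shifted edges; the caller carries the hidden state from <output>[2] back into <input>[2] between steps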
+ local connections_t = {
+ ["<input>[1]"] = "selectL1[1]",
+
+ ["selectL1[1]"] = "gruL1[1]",
+ ["gruL1[1]"] = "combinerL1[1]",
+ ["<input>[2]"] = "gruL1[2]",
+ --{"combinerL1[2]", "dropoutL1[1]", 0},
+
+ ["combinerL" .. global_conf.layer_num .. "[1]"] = "outputL[1]",
+ ["outputL[1]"] = "<output>[1]",
+ ["combinerL1[2]"] = "<output>[2]",
+ }
+
+ if global_conf.layer_num > 1 then
+        nerv.error("multiple layers are currently not supported (not hard to implement, though)")
+ end
+ --[[
+ for l = 2, global_conf.layer_num do
+ table.insert(connections_t, {"dropoutL"..(l-1).."[1]", "gruL"..l.."[1]", 0})
+ table.insert(connections_t, {"gruL"..l.."[1]", "combinerL"..l.."[1]", 0})
+ table.insert(connections_t, {"combinerL"..l.."[1]", "gruL"..l.."[2]", 1})
+ table.insert(connections_t, {"combinerL"..l.."[2]", "dropoutL"..l.."[1]", 0})
+ end
+ ]]--
+
+ --[[
+ printf("%s printing DAG connections:\n", global_conf.sche_log_pre)
+ for key, value in pairs(connections_t) do
+ printf("\t%s->%s\n", key, value)
+ end
+ ]]--
+
+ local dagL = nerv.DAGLayerT("dagL", global_conf, {["dim_in"] = {1, global_conf.hidden_size},
+ ["dim_out"] = {global_conf.vocab:size(), global_conf.hidden_size}, ["sub_layers"] = layerRepo,
+ ["connections"] = connections_t
+ })
+
+ dagL:init(global_conf.batch_size)
+
+    nerv.printf("%s initializing DAG layer end.\n", global_conf.sche_log_pre)
+    return dagL
+end
+
+function load_net_tnn(global_conf, fn)
+ prepare_parameters(global_conf, fn)
+ local layerRepo = prepare_layers(global_conf)
+ local tnn = prepare_tnn(global_conf, layerRepo)
+ return tnn
+end
+
+function load_net_dagL(global_conf, fn)
+    prepare_parameters(global_conf, fn)
+    local layerRepo = prepare_layers(global_conf)
+    local dagL = prepare_dagL(global_conf, layerRepo)
+    return dagL
+end
+
+local train_fn, valid_fn, test_fn
+global_conf = {}
+local set = arg[1] --"test"
+
+root_dir = '/home/slhome/txh18/workspace'
+
+if (set == "ptb") then
+
+data_dir = root_dir .. '/ptb/DATA'
+train_fn = data_dir .. '/ptb.train.txt.adds'
+valid_fn = data_dir .. '/ptb.valid.txt.adds'
+test_fn = data_dir .. '/ptb.test.txt.adds'
+vocab_fn = data_dir .. '/vocab'
+
+qdata_dir = root_dir .. '/ptb/questionGen/gen'
+
+global_conf = {
+ lrate = 0.15, wcost = 1e-5, momentum = 0, clip_t = 5,
+ cumat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
+ nn_act_default = 0,
+
+ hidden_size = 300,
+ layer_num = 1,
+ chunk_size = 15,
+ batch_size = 32,
+ max_iter = 35,
+ lr_decay = 1.003,
+ decay_iter = 10,
+ param_random = function() return (math.random() / 5 - 0.1) end,
+ dropout_str = "0.5",
+
+ train_fn = train_fn,
+ valid_fn = valid_fn,
+ test_fn = test_fn,
+ vocab_fn = vocab_fn,
+ max_sen_len = 90,
+ sche_log_pre = "[SCHEDULER]:",
+ log_w_num = 40000, --give a message when log_w_num words have been processed
+ timer = nerv.Timer(),
+ work_dir_base = root_dir .. '/ptb/EXP-nerv/grulm_v1.0',
+
+ fn_to_sample = root_dir .. '/ptb/EXP-nerv/grulm_v1.0h300l1ch15ba32slr0.15wc1e-05dr0.5/params.final',
+}
+
+elseif (set == "msr_sc") then
+
+data_dir = '/home/slhome/txh18/workspace/sentenceCompletion/DATA_PV2'
+train_fn = data_dir .. '/normed_all.sf.len60.adds.train'
+valid_fn = data_dir .. '/normed_all.sf.len60.adds.dev'
+test_fn = data_dir .. '/answer_normed.adds'
+vocab_fn = data_dir .. '/normed_all.choose.vocab30000.addqvocab'
+
+global_conf = {
+ lrate = 1, wcost = 1e-6, momentum = 0,
+ cumat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
+ nn_act_default = 0,
+
+ hidden_size = 300,
+ layer_num = 1,
+ chunk_size = 15,
+ batch_size = 10,
+ max_iter = 30,
+ decay_iter = 10,
+ lr_decay = 1.003,
+ param_random = function() return (math.random() / 5 - 0.1) end,
+ dropout_str = "0",
+
+ train_fn = train_fn,
+ valid_fn = valid_fn,
+ test_fn = test_fn,
+ vocab_fn = vocab_fn,
+ sche_log_pre = "[SCHEDULER]:",
+ log_w_num = 400000, --give a message when log_w_num words have been processed
+ timer = nerv.Timer(),
+ work_dir_base = '/home/slhome/txh18/workspace/sentenceCompletion/EXP-Nerv/rnnlm_test'
+}
+
+elseif (set == "twitter") then
+
+data_dir = root_dir .. '/twitter_new/DATA'
+train_fn = data_dir .. '/twitter.choose2.adds'
+valid_fn = data_dir .. '/twitter.valid.adds'
+test_fn = data_dir .. '/comm.test.choose-ppl.adds'
+vocab_fn = data_dir .. '/twitter.choose.train.vocab'
+
+--qdata_dir = root_dir .. '/ptb/questionGen/gen'
+
+global_conf = {
+ lrate = 0.15, wcost = 1e-5, momentum = 0, clip_t = 5,
+ cumat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
+ nn_act_default = 0,
+
+ hidden_size = 300,
+ layer_num = 1,
+ chunk_size = 15,
+ batch_size = 32,
+ max_iter = 30,
+ lr_decay = 1.003,
+ decay_iter = 10,
+ param_random = function() return (math.random() / 5 - 0.1) end,
+ dropout_str = "0.5",
+
+ train_fn = train_fn,
+ valid_fn = valid_fn,
+ test_fn = test_fn,
+ vocab_fn = vocab_fn,
+ max_sen_len = 32,
+ sche_log_pre = "[SCHEDULER]:",
+ log_w_num = 40000, --give a message when log_w_num words have been processed
+ timer = nerv.Timer(),
+ work_dir_base = root_dir .. '/twitter_new/EXP-nerv/grulm_v1.0'
+}
+
+else
+
+valid_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
+train_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
+test_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
+vocab_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
+
+global_conf = {
+ lrate = 0.01, wcost = 1e-5, momentum = 0,
+ cumat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
+ nn_act_default = 0,
+
+ hidden_size = 20,
+ layer_num = 1,
+ chunk_size = 2,
+ batch_size = 10,
+ max_iter = 3,
+ param_random = function() return (math.random() / 5 - 0.1) end,
+ dropout_str = "0",
+
+ train_fn = train_fn,
+ valid_fn = valid_fn,
+ test_fn = test_fn,
+ max_sen_len = 80,
+ lr_decay = 1.003,
+ decay_iter = 10,
+ vocab_fn = vocab_fn,
+ sche_log_pre = "[SCHEDULER]:",
+ log_w_num = 10, --give a message when log_w_num words have been processed
+ timer = nerv.Timer(),
+ work_dir_base = '/home/slhome/txh18/workspace/nerv/play/testEXP/tnn_lstmlm_test'
+}
+
+end
+
+lr_half = false --cannot be local; it may be overridden by loadstring(arg[2]) below
+start_iter = -1
+start_lr = nil
+ppl_last = 100000
+commands_str = "sampling" --"train:test"
+commands = {}
+test_iter = -1
+--for testout(question)
+q_file = "/home/slhome/txh18/workspace/ptb/questionGen/gen/ptb.test.txt.q10rs1_Msss.adds"
+
+if arg[2] ~= nil then
+ nerv.printf("%s applying arg[2](%s)...\n", global_conf.sche_log_pre, arg[2])
+ loadstring(arg[2])()
+ nerv.LMUtil.wait(0.5)
+else
+ nerv.printf("%s no user setting, all default...\n", global_conf.sche_log_pre)
+end
+
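+--the work_dir name encodes the main hyper-parameters (hidden size, layer num, chunk size, batch size, lrate, wcost, dropout)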
+global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'l' .. global_conf.layer_num .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost .. 'dr' .. global_conf.dropout_str
+global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
+global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
+global_conf.param_fn = global_conf.work_dir .. "/params"
+global_conf.dropout_list = nerv.SUtil.parse_schedule(global_conf.dropout_str)
+global_conf.log_fn = global_conf.work_dir .. '/log_lstm_tnn_' .. commands_str ..os.date("_TT%m_%d_%X",os.time())
+global_conf.log_fn, _ = string.gsub(global_conf.log_fn, ':', '-')
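+--parse the command list in commands_str (e.g. "train:test") into a set of enabled commands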
+commands = nerv.SUtil.parse_commands_set(commands_str)
+
+if start_lr ~= nil then
+ global_conf.lrate = start_lr
+end
+
+nerv.printf("%s creating work_dir(%s)...\n", global_conf.sche_log_pre, global_conf.work_dir)
+nerv.LMUtil.wait(2)
+os.execute("mkdir -p "..global_conf.work_dir)
+os.execute("cp " .. global_conf.train_fn .. " " .. global_conf.train_fn_shuf)
+
+--redirecting log outputs!
+nerv.SUtil.log_redirect(global_conf.log_fn)
+nerv.LMUtil.wait(2)
+
+----------------printing options---------------------------------
+nerv.printf("%s printing global_conf...\n", global_conf.sche_log_pre)
+for id, value in pairs(global_conf) do
+ nerv.printf("%s:\t%s\n", id, tostring(value))
+end
+nerv.LMUtil.wait(2)
+
+nerv.printf("%s printing training scheduling options...\n", global_conf.sche_log_pre)
+nerv.printf("lr_half:\t%s\n", tostring(lr_half))
+nerv.printf("start_iter:\t%s\n", tostring(start_iter))
+nerv.printf("ppl_last:\t%s\n", tostring(ppl_last))
+nerv.printf("commands_str:\t%s\n", commands_str)
+nerv.printf("test_iter:\t%s\n", tostring(test_iter))
+nerv.printf("%s printing training scheduling end.\n", global_conf.sche_log_pre)
+nerv.LMUtil.wait(2)
+------------------printing options end------------------------------
+
+math.randomseed(1)
+
+local vocab = nerv.LMVocab()
+global_conf["vocab"] = vocab
+nerv.printf("%s building vocab...\n", global_conf.sche_log_pre)
+global_conf.vocab:build_file(global_conf.vocab_fn, false)
+ppl_rec = {}
+
+local final_iter = -1
+if commands["test"] == 1 then
+ nerv.printf("===FINAL TEST===\n")
+ global_conf.sche_log_pre = "[SCHEDULER FINAL_TEST]:"
+ local tnn = load_net_tnn(global_conf, global_conf.fn_to_sample)
+ global_conf.dropout_rate = 0
+    LMTrainer.lm_process_file_rnn(global_conf, global_conf.test_fn, tnn, false) --false: no parameter update (evaluation only)
+end --if commands["test"]
+
+if commands["sampling"] == 1 then
+ nerv.printf("===SAMPLE===\n")
+ global_conf.sche_log_pre = "[SCHEDULER SAMPLING]:"
+ local dagL = load_net_dagL(global_conf, global_conf.fn_to_sample)
+ --global_conf.dropout_rate = 0
+ --LMTrainer.lm_process_file_rnn(global_conf, global_conf.test_fn, tnn, false) --false update!
+end --if commands["sampling"]
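+
+--A minimal sampling sketch: the "sampling" command above only loads the single-step DAG;
+--a full sampler would feed one word per step, carry the hidden state from <output>[2] back
+--into <input>[2], and draw the next word from the output distribution. The helper below is
+--plain Lua with no nerv calls and assumes the output row has been normalized and copied into
+--a 1-indexed Lua table of probabilities; the nerv-side wiring is not shown here.
+local function sample_word_id(probs)
+    --inverse-CDF sampling: walk the cumulative sum until it passes a uniform draw
+    local r = math.random()
+    local acc = 0
+    for id = 1, #probs do
+        acc = acc + probs[id]
+        if acc >= r then
+            return id
+        end
+    end
+    return #probs --guard against floating-point rounding
+end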
+
+