Diffstat (limited to 'nerv/examples/lmptb/tnn_ptb_main.lua')
-rw-r--r-- | nerv/examples/lmptb/tnn_ptb_main.lua | 73
1 file changed, 41 insertions, 32 deletions
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua
index 50286c9..3096a3f 100644
--- a/nerv/examples/lmptb/tnn_ptb_main.lua
+++ b/nerv/examples/lmptb/tnn_ptb_main.lua
@@ -17,8 +17,14 @@ local LMTrainer = nerv.LMTrainer
 function prepare_parameters(global_conf, iter)
     printf("%s preparing parameters...\n", global_conf.sche_log_pre)
+    global_conf.paramRepo = nerv.ParamRepo()
+    local paramRepo = global_conf.paramRepo
+
     if iter == -1 then --first time
-        printf("%s first time, generating parameters...\n", global_conf.sche_log_pre)
+        printf("%s first time, prepare some pre-set parameters, and leaving other parameters to auto-generation...\n", global_conf.sche_log_pre)
+        local f = nerv.ChunkFile(global_conf.param_fn .. '.0', 'w')
+        f:close()
+        --[[
         ltp_ih = nerv.LinearTransParam("ltp_ih", global_conf)
         ltp_ih.trans = global_conf.cumat_type(global_conf.vocab:size(), global_conf.hidden_size) --index 0 is for zero, others correspond to vocab index(starting from 1)
         ltp_ih.trans:generate(global_conf.param_random)
@@ -27,47 +33,48 @@ function prepare_parameters(global_conf, iter)
         ltp_hh.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.hidden_size)
         ltp_hh.trans:generate(global_conf.param_random)
 
-        ltp_ho = nerv.LinearTransParam("ltp_ho", global_conf)
-        ltp_ho.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.vocab:size())
-        ltp_ho.trans:generate(global_conf.param_random)
+        --ltp_ho = nerv.LinearTransParam("ltp_ho", global_conf)
+        --ltp_ho.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.vocab:size())
+        --ltp_ho.trans:generate(global_conf.param_random)
 
         bp_h = nerv.BiasParam("bp_h", global_conf)
         bp_h.trans = global_conf.cumat_type(1, global_conf.hidden_size)
         bp_h.trans:generate(global_conf.param_random)
 
-        bp_o = nerv.BiasParam("bp_o", global_conf)
-        bp_o.trans = global_conf.cumat_type(1, global_conf.vocab:size())
-        bp_o.trans:generate(global_conf.param_random)
+        --bp_o = nerv.BiasParam("bp_o", global_conf)
+        --bp_o.trans = global_conf.cumat_type(1, global_conf.vocab:size())
+        --bp_o.trans:generate(global_conf.param_random)
 
         local f = nerv.ChunkFile(global_conf.param_fn .. '.0', 'w')
         f:write_chunk(ltp_ih)
         f:write_chunk(ltp_hh)
-        f:write_chunk(ltp_ho)
+        --f:write_chunk(ltp_ho)
         f:write_chunk(bp_h)
-        f:write_chunk(bp_o)
+        --f:write_chunk(bp_o)
         f:close()
-
+        ]]--
         return nil
     end
 
     printf("%s loading parameter from file %s...\n", global_conf.sche_log_pre, global_conf.param_fn .. '.' .. tostring(iter))
-    local paramRepo = nerv.ParamRepo()
     paramRepo:import({global_conf.param_fn .. '.' .. tostring(iter)}, nil, global_conf)
 
     printf("%s preparing parameters end.\n", global_conf.sche_log_pre)
 
-    return paramRepo
+    return nil
 end
 
 --global_conf: table
 --Returns: nerv.LayerRepo
-function prepare_layers(global_conf, paramRepo)
+function prepare_layers(global_conf)
     printf("%s preparing layers...\n", global_conf.sche_log_pre)
+    local paramRepo = global_conf.paramRepo
+
     local du = false
     --local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}}
-    local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du}}
+    local recurrentLconfig = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du}}
 
     local layers = {
         ["nerv.AffineRecurrentLayer"] = {
@@ -75,7 +82,7 @@ function prepare_layers(global_conf, paramRepo)
         },
 
         ["nerv.SelectLinearLayer"] = {
-            ["selectL1"] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}},
+            ["selectL1"] = {{}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab}},
         },
 
         ["nerv.SigmoidLayer"] = {
@@ -87,7 +94,7 @@ function prepare_layers(global_conf, paramRepo)
         },
 
         ["nerv.AffineLayer"] = {
-            ["outputL"] = {{["ltp"] = "ltp_ho", ["bp"] = "bp_o"}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du}},
+            ["outputL"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du}},
         },
 
         ["nerv.SoftmaxCELayerT"] = {
@@ -146,10 +153,10 @@ function prepare_tnn(global_conf, layerRepo)
 end
 
 function load_net(global_conf, next_iter)
-    local paramRepo = prepare_parameters(global_conf, next_iter)
-    local layerRepo = prepare_layers(global_conf, paramRepo)
+    prepare_parameters(global_conf, next_iter)
+    local layerRepo = prepare_layers(global_conf)
     local tnn = prepare_tnn(global_conf, layerRepo)
-    return tnn, paramRepo
+    return tnn
 end
 
 local train_fn, valid_fn, test_fn
@@ -184,7 +191,7 @@ global_conf = {
     sche_log_pre = "[SCHEDULER]:",
     log_w_num = 40000, --give a message when log_w_num words have been processed
     timer = nerv.Timer(),
-    work_dir = '/home/slhome/txh18/workspace/nerv/play/dagL_test'
+    work_dir_base = '/home/slhome/txh18/workspace/nerv/play/ptbEXP/tnn_test'
 }
 
 elseif (set == "msr_sc") then
@@ -215,7 +222,7 @@ global_conf = {
     sche_log_pre = "[SCHEDULER]:",
     log_w_num = 40000, --give a message when log_w_num words have been processed
     timer = nerv.Timer(),
-    work_dir = '/home/slhome/txh18/workspace/sentenceCompletion/EXP-Nerv/rnnlm_test'
+    work_dir_base = '/home/slhome/txh18/workspace/sentenceCompletion/EXP-Nerv/rnnlm_test'
 }
 
 else
@@ -233,7 +240,7 @@ global_conf = {
     hidden_size = 20,
     chunk_size = 2,
-    batch_size = 3,
+    batch_size = 10,
     max_iter = 3,
     param_random = function() return (math.random() / 5 - 0.1) end,
 
@@ -244,15 +251,11 @@ global_conf = {
     sche_log_pre = "[SCHEDULER]:",
     log_w_num = 10, --give a message when log_w_num words have been processed
     timer = nerv.Timer(),
-    work_dir = '/home/slhome/txh18/workspace/nerv/play/dagL_test'
+    work_dir_base = '/home/slhome/txh18/workspace/nerv/play/testEXP/tnn_test'
 }
 
 end
 
-global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
-global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
-global_conf.param_fn = global_conf.work_dir .. "/params"
-
 lr_half = false --can not be local, to be set by loadstring
 start_iter = -1
 ppl_last = 100000
@@ -264,6 +267,11 @@ else
     printf("%s not user setting, all default...\n", global_conf.sche_log_pre)
 end
 
+global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate
+global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
+global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
+global_conf.param_fn = global_conf.work_dir .. "/params"
+
 ----------------printing options---------------------------------
 printf("%s printing global_conf...\n", global_conf.sche_log_pre)
 for id, value in pairs(global_conf) do
@@ -291,12 +299,13 @@ global_conf.vocab:build_file(global_conf.vocab_fn, false)
 ppl_rec = {}
 
 if start_iter == -1 then
-    prepare_parameters(global_conf, -1) --randomly generate parameters
+    prepare_parameters(global_conf, -1) --write pre_generated params to param.0 file
 end
 
 if start_iter == -1 or start_iter == 0 then
     print("===INITIAL VALIDATION===")
-    local tnn, paramRepo = load_net(global_conf, 0)
+    local tnn = load_net(global_conf, 0)
+    global_conf.paramRepo:export(global_conf.param_fn .. '.0', nil) --some parameters are auto-generated, saved again to param.0 file
     local result = LMTrainer.lm_process_file(global_conf, global_conf.valid_fn, tnn, false) --false update!
     nerv.LMUtil.wait(1)
     ppl_rec[0] = {}
@@ -315,7 +324,7 @@ local final_iter
 for iter = start_iter, global_conf.max_iter, 1 do
     final_iter = iter --for final testing
     global_conf.sche_log_pre = "[SCHEDULER ITER"..iter.." LR"..global_conf.lrate.."]:"
-    tnn, paramRepo = load_net(global_conf, iter - 1)
+    tnn = load_net(global_conf, iter - 1)
     printf("===ITERATION %d LR %f===\n", iter, global_conf.lrate)
     result = LMTrainer.lm_process_file(global_conf, global_conf.train_fn_shuf, tnn, true) --true update!
     ppl_rec[iter] = {}
@@ -336,7 +345,7 @@ for iter = start_iter, global_conf.max_iter, 1 do
     end
     if ppl_rec[iter].valid < ppl_last then
         printf("%s PPL improves, saving net to file %s.%d...\n", global_conf.sche_log_pre, global_conf.param_fn, iter)
-        paramRepo:export(global_conf.param_fn .. '.' .. tostring(iter), nil)
+        global_conf.paramRepo:export(global_conf.param_fn .. '.' .. tostring(iter), nil)
     else
         printf("%s PPL did not improve, rejected, copying param file of last iter...\n", global_conf.sche_log_pre)
         os.execute('cp ' .. global_conf.param_fn..'.'..tostring(iter - 1) .. ' ' .. global_conf.param_fn..'.'..tostring(iter))
@@ -357,6 +366,6 @@ end
 printf("\n")
 printf("===FINAL TEST===\n")
 global_conf.sche_log_pre = "[SCHEDULER FINAL_TEST]:"
-tnn, paramRepo = load_net(global_conf, final_iter)
+tnn = load_net(global_conf, final_iter)
 LMTrainer.lm_process_file(global_conf, global_conf.test_fn, tnn, false) --false update!
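
Note on the renamed setting: the patch replaces the fixed work_dir with a work_dir_base and derives the actual work_dir from the hyperparameters. A minimal standalone Lua sketch of that concatenation, for reference only (the lrate value below is assumed for illustration; the other values mirror the "else" branch of the patch):

local global_conf = {
    work_dir_base = '/home/slhome/txh18/workspace/nerv/play/testEXP/tnn_test',
    hidden_size = 20, chunk_size = 2, batch_size = 10,
    lrate = 0.1, -- assumed value; the patch does not show the default lrate
}
-- same concatenation as in the patch: base .. 'h' .. hidden .. 'ch' .. chunk .. 'ba' .. batch .. 'slr' .. lrate
global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size
    .. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size
    .. 'slr' .. global_conf.lrate
print(global_conf.work_dir)
--> /home/slhome/txh18/workspace/nerv/play/testEXP/tnn_testh20ch2ba10slr0.1

Since param_fn and train_fn_shuf are now built from this work_dir, each hyperparameter combination keeps its parameter files and shuffled training data in its own directory.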