diff options
author | txh18 <cloudygooseg@gmail.com> | 2015-11-20 21:49:33 +0800 |
---|---|---|
committer | txh18 <cloudygooseg@gmail.com> | 2015-11-20 21:49:33 +0800 |
commit | ddcb0a8f3ee045910acc618177dc5baf7adb8bf3 (patch) | |
tree | 0db82eb0b0ea0418803ed55c19dfc8eccc52b76a /nerv/examples/lmptb/tnn_ptb_main.lua | |
parent | 4f5b45b79b8d5f6a9094888cf6b929fe86ac24a3 (diff) |
complete auto-generate params
Diffstat (limited to 'nerv/examples/lmptb/tnn_ptb_main.lua')
-rw-r--r-- | nerv/examples/lmptb/tnn_ptb_main.lua | 16 |
1 file changed, 10 insertions, 6 deletions
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua index 6afecbf..3096a3f 100644 --- a/nerv/examples/lmptb/tnn_ptb_main.lua +++ b/nerv/examples/lmptb/tnn_ptb_main.lua @@ -21,7 +21,10 @@ function prepare_parameters(global_conf, iter) local paramRepo = global_conf.paramRepo if iter == -1 then --first time - printf("%s first time, generating parameters...\n", global_conf.sche_log_pre) + printf("%s first time, prepare some pre-set parameters, and leaving other parameters to auto-generation...\n", global_conf.sche_log_pre) + local f = nerv.ChunkFile(global_conf.param_fn .. '.0', 'w') + f:close() + --[[ ltp_ih = nerv.LinearTransParam("ltp_ih", global_conf) ltp_ih.trans = global_conf.cumat_type(global_conf.vocab:size(), global_conf.hidden_size) --index 0 is for zero, others correspond to vocab index(starting from 1) ltp_ih.trans:generate(global_conf.param_random) @@ -49,7 +52,7 @@ function prepare_parameters(global_conf, iter) f:write_chunk(bp_h) --f:write_chunk(bp_o) f:close() - + ]]-- return nil end @@ -71,7 +74,7 @@ function prepare_layers(global_conf) local du = false --local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}} - local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du}} + local recurrentLconfig = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du}} local layers = { ["nerv.AffineRecurrentLayer"] = { @@ -79,7 +82,7 @@ function prepare_layers(global_conf) }, ["nerv.SelectLinearLayer"] = { - ["selectL1"] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = 
{1}, ["dim_out"] = {global_conf.hidden_size}}}, + ["selectL1"] = {{}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab}}, }, ["nerv.SigmoidLayer"] = { @@ -91,7 +94,7 @@ function prepare_layers(global_conf) }, ["nerv.AffineLayer"] = { - ["outputL"] = {{["ltp"] = "ltp_ho", ["bp"] = "bp_o"}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du}}, + ["outputL"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du}}, }, ["nerv.SoftmaxCELayerT"] = { @@ -296,12 +299,13 @@ global_conf.vocab:build_file(global_conf.vocab_fn, false) ppl_rec = {} if start_iter == -1 then - prepare_parameters(global_conf, -1) --randomly generate parameters + prepare_parameters(global_conf, -1) --write pre_generated params to param.0 file end if start_iter == -1 or start_iter == 0 then print("===INITIAL VALIDATION===") local tnn = load_net(global_conf, 0) + global_conf.paramRepo:export(global_conf.param_fn .. '.0', nil) --some parameters are auto-generated, saved again to param.0 file local result = LMTrainer.lm_process_file(global_conf, global_conf.valid_fn, tnn, false) --false update! nerv.LMUtil.wait(1) ppl_rec[0] = {} |