diff options
author | txh18 <[email protected]> | 2015-11-16 11:44:43 +0800 |
---|---|---|
committer | txh18 <[email protected]> | 2015-11-16 11:44:43 +0800 |
commit | 267a486fb78a985cbfdc60ef8549b3128f716713 (patch) | |
tree | c60697e60ef5053203b5148cb3f0bfbf88a81c94 /nerv/examples/lmptb/tnn_ptb_main.lua | |
parent | ef40688d5a0a3b7eae18dc364a40ae4e8e7619e7 (diff) |
fixed direct update; the effect on results is not yet known
Diffstat (limited to 'nerv/examples/lmptb/tnn_ptb_main.lua')
-rw-r--r-- | nerv/examples/lmptb/tnn_ptb_main.lua | 8 |
1 file changed, 5 insertions, 3 deletions
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua index 891487c..19d0f8a 100644 --- a/nerv/examples/lmptb/tnn_ptb_main.lua +++ b/nerv/examples/lmptb/tnn_ptb_main.lua @@ -63,9 +63,11 @@ end --Returns: nerv.LayerRepo function prepare_layers(global_conf, paramRepo) printf("%s preparing layers...\n", global_conf.sche_log_pre) + + local du = true --local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}} - local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10}} + local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du}} local layers = { ["nerv.AffineRecurrentLayer"] = { @@ -85,7 +87,7 @@ function prepare_layers(global_conf, paramRepo) }, ["nerv.AffineLayer"] = { - ["outputL"] = {{["ltp"] = "ltp_ho", ["bp"] = "bp_o"}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}}}, + ["outputL"] = {{["ltp"] = "ltp_ho", ["bp"] = "bp_o"}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du}}, }, ["nerv.SoftmaxCELayerT"] = { @@ -168,7 +170,7 @@ global_conf = { mmat_type = nerv.MMatrixFloat, nn_act_default = 0, - hidden_size = 300, --set to 400 for a stable good test PPL + hidden_size = 400, --set to 400 for a stable good test PPL chunk_size = 15, batch_size = 10, max_iter = 35, |