path: root/nerv/examples/lmptb/tnn_ptb_main.lua
author     txh18 <cloudygooseg@gmail.com>  2015-11-25 23:42:37 +0800
committer  txh18 <cloudygooseg@gmail.com>  2015-11-25 23:42:37 +0800
commit     75a2d6a2a08caf987017f5a9043ac93afcd70980 (patch)
tree       d3b08fa846de56f3cd8a3f9a7bd3add200840ea8 /nerv/examples/lmptb/tnn_ptb_main.lua
parent     ca3500f01ea7ce695a4dbf70d2be8244827097c9 (diff)
changed auto-generating params: they are no longer saved in global_conf.paramRepo automatically
Diffstat (limited to 'nerv/examples/lmptb/tnn_ptb_main.lua')
-rw-r--r--  nerv/examples/lmptb/tnn_ptb_main.lua  13
1 file changed, 7 insertions(+), 6 deletions(-)
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua
index 9156b61..16024a8 100644
--- a/nerv/examples/lmptb/tnn_ptb_main.lua
+++ b/nerv/examples/lmptb/tnn_ptb_main.lua
@@ -69,12 +69,12 @@ end
function prepare_layers(global_conf)
printf("%s preparing layers...\n", global_conf.sche_log_pre)
- local paramRepo = global_conf.paramRepo
+ local pr = global_conf.paramRepo
local du = false
--local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}}
- local recurrentLconfig = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du}}
+ local recurrentLconfig = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du, ["pr"] = pr}}
local layers = {
["nerv.AffineRecurrentLayer"] = {
@@ -82,7 +82,7 @@ function prepare_layers(global_conf)
},
["nerv.SelectLinearLayer"] = {
- ["selectL1"] = {{}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab}},
+ ["selectL1"] = {{}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab, ["pr"] = pr}},
},
["nerv.SigmoidLayer"] = {
@@ -94,7 +94,7 @@ function prepare_layers(global_conf)
},
["nerv.AffineLayer"] = {
- ["outputL"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du}},
+ ["outputL"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du, ["pr"] = pr}},
},
["nerv.SoftmaxCELayerT"] = {
@@ -111,7 +111,7 @@ function prepare_layers(global_conf)
end
--]]
- local layerRepo = nerv.LayerRepo(layers, paramRepo, global_conf)
+ local layerRepo = nerv.LayerRepo(layers, pr, global_conf)
printf("%s preparing layers end.\n", global_conf.sche_log_pre)
return layerRepo
end
@@ -220,7 +220,7 @@ global_conf = {
test_fn = test_fn,
vocab_fn = vocab_fn,
sche_log_pre = "[SCHEDULER]:",
- log_w_num = 40000, --give a message when log_w_num words have been processed
+ log_w_num = 400000, --give a message when log_w_num words have been processed
timer = nerv.Timer(),
work_dir_base = '/home/slhome/txh18/workspace/sentenceCompletion/EXP-Nerv/rnnlm_test'
}
@@ -305,6 +305,7 @@ end
if start_iter == -1 or start_iter == 0 then
print("===INITIAL VALIDATION===")
local tnn = load_net(global_conf, 0)
+ global_conf.paramRepo = tnn:get_params() --get auto-generated params
global_conf.paramRepo:export(global_conf.param_fn .. '.0', nil) --some parameters are auto-generated, saved again to param.0 file
local result = LMTrainer.lm_process_file(global_conf, global_conf.valid_fn, tnn, false) --false update!
nerv.LMUtil.wait(1)
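
Taken together, the hunks above change how auto-generated parameters flow: each layer config is handed the shared ParamRepo through a "pr" field, and the parameters the network generates are fetched explicitly with tnn:get_params() before being exported. Below is a minimal sketch of that flow, assuming a global_conf set up as in this script; only nerv.LayerRepo, tnn:get_params and ParamRepo:export are taken from the diff, and load_net refers to the script's own network-loading helper.

-- sketch of the parameter flow after this commit (assumes global_conf is
-- initialised as in tnn_ptb_main.lua; load_net is the script's own helper)
local function prepare_layers(global_conf)
    local pr = global_conf.paramRepo      -- shared ParamRepo, now passed to each layer config
    local layers = {
        ["nerv.AffineLayer"] = {
            ["outputL"] = {{}, {["dim_in"] = {global_conf.hidden_size},
                                ["dim_out"] = {global_conf.vocab:size()},
                                ["direct_update"] = false, ["pr"] = pr}},
        },
    }
    -- the repo is still given to the LayerRepo constructor, as before
    return nerv.LayerRepo(layers, pr, global_conf)
end

-- later, once the network has been built:
local tnn = load_net(global_conf, 0)
-- auto-generated params are no longer registered in global_conf.paramRepo by the
-- layers themselves; fetch them from the network and export them explicitly
global_conf.paramRepo = tnn:get_params()
global_conf.paramRepo:export(global_conf.param_fn .. '.0', nil)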