path: root/nerv/examples/lmptb/tnn_ptb_main.lua
author     txh18 <cloudygooseg@gmail.com>  2015-11-20 19:58:14 +0800
committer  txh18 <cloudygooseg@gmail.com>  2015-11-20 19:58:14 +0800
commit     4f5b45b79b8d5f6a9094888cf6b929fe86ac24a3 (patch)
tree       60d5b6232b8d33da9178717c27ecb6dc3591d0b6 /nerv/examples/lmptb/tnn_ptb_main.lua
parent     6456f5d0b97c5ad7e35c58751f74b8c9fefb635e (diff)
working on automatic parameters for layers
Diffstat (limited to 'nerv/examples/lmptb/tnn_ptb_main.lua')
-rw-r--r--  nerv/examples/lmptb/tnn_ptb_main.lua  42
1 file changed, 23 insertions(+), 19 deletions(-)
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua
index 059d52a..6afecbf 100644
--- a/nerv/examples/lmptb/tnn_ptb_main.lua
+++ b/nerv/examples/lmptb/tnn_ptb_main.lua
@@ -17,6 +17,9 @@ local LMTrainer = nerv.LMTrainer
function prepare_parameters(global_conf, iter)
printf("%s preparing parameters...\n", global_conf.sche_log_pre)
+ global_conf.paramRepo = nerv.ParamRepo()
+ local paramRepo = global_conf.paramRepo
+
if iter == -1 then --first time
printf("%s first time, generating parameters...\n", global_conf.sche_log_pre)
ltp_ih = nerv.LinearTransParam("ltp_ih", global_conf)
@@ -27,43 +30,44 @@ function prepare_parameters(global_conf, iter)
ltp_hh.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.hidden_size)
ltp_hh.trans:generate(global_conf.param_random)
- ltp_ho = nerv.LinearTransParam("ltp_ho", global_conf)
- ltp_ho.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.vocab:size())
- ltp_ho.trans:generate(global_conf.param_random)
+ --ltp_ho = nerv.LinearTransParam("ltp_ho", global_conf)
+ --ltp_ho.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.vocab:size())
+ --ltp_ho.trans:generate(global_conf.param_random)
bp_h = nerv.BiasParam("bp_h", global_conf)
bp_h.trans = global_conf.cumat_type(1, global_conf.hidden_size)
bp_h.trans:generate(global_conf.param_random)
- bp_o = nerv.BiasParam("bp_o", global_conf)
- bp_o.trans = global_conf.cumat_type(1, global_conf.vocab:size())
- bp_o.trans:generate(global_conf.param_random)
+ --bp_o = nerv.BiasParam("bp_o", global_conf)
+ --bp_o.trans = global_conf.cumat_type(1, global_conf.vocab:size())
+ --bp_o.trans:generate(global_conf.param_random)
local f = nerv.ChunkFile(global_conf.param_fn .. '.0', 'w')
f:write_chunk(ltp_ih)
f:write_chunk(ltp_hh)
- f:write_chunk(ltp_ho)
+ --f:write_chunk(ltp_ho)
f:write_chunk(bp_h)
- f:write_chunk(bp_o)
+ --f:write_chunk(bp_o)
f:close()
return nil
end
printf("%s loading parameter from file %s...\n", global_conf.sche_log_pre, global_conf.param_fn .. '.' .. tostring(iter))
- local paramRepo = nerv.ParamRepo()
paramRepo:import({global_conf.param_fn .. '.' .. tostring(iter)}, nil, global_conf)
printf("%s preparing parameters end.\n", global_conf.sche_log_pre)
- return paramRepo
+ return nil
end
--global_conf: table
--Returns: nerv.LayerRepo
-function prepare_layers(global_conf, paramRepo)
+function prepare_layers(global_conf)
printf("%s preparing layers...\n", global_conf.sche_log_pre)
+ local paramRepo = global_conf.paramRepo
+
local du = false
--local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}}
@@ -146,10 +150,10 @@ function prepare_tnn(global_conf, layerRepo)
end
function load_net(global_conf, next_iter)
- local paramRepo = prepare_parameters(global_conf, next_iter)
- local layerRepo = prepare_layers(global_conf, paramRepo)
+ prepare_parameters(global_conf, next_iter)
+ local layerRepo = prepare_layers(global_conf)
local tnn = prepare_tnn(global_conf, layerRepo)
- return tnn, paramRepo
+ return tnn
end
local train_fn, valid_fn, test_fn
@@ -233,7 +237,7 @@ global_conf = {
hidden_size = 20,
chunk_size = 2,
- batch_size = 3,
+ batch_size = 10,
max_iter = 3,
param_random = function() return (math.random() / 5 - 0.1) end,
@@ -297,7 +301,7 @@ end
if start_iter == -1 or start_iter == 0 then
print("===INITIAL VALIDATION===")
- local tnn, paramRepo = load_net(global_conf, 0)
+ local tnn = load_net(global_conf, 0)
local result = LMTrainer.lm_process_file(global_conf, global_conf.valid_fn, tnn, false) --false update!
nerv.LMUtil.wait(1)
ppl_rec[0] = {}
@@ -316,7 +320,7 @@ local final_iter
for iter = start_iter, global_conf.max_iter, 1 do
final_iter = iter --for final testing
global_conf.sche_log_pre = "[SCHEDULER ITER"..iter.." LR"..global_conf.lrate.."]:"
- tnn, paramRepo = load_net(global_conf, iter - 1)
+ tnn = load_net(global_conf, iter - 1)
printf("===ITERATION %d LR %f===\n", iter, global_conf.lrate)
result = LMTrainer.lm_process_file(global_conf, global_conf.train_fn_shuf, tnn, true) --true update!
ppl_rec[iter] = {}
@@ -337,7 +341,7 @@ for iter = start_iter, global_conf.max_iter, 1 do
end
if ppl_rec[iter].valid < ppl_last then
printf("%s PPL improves, saving net to file %s.%d...\n", global_conf.sche_log_pre, global_conf.param_fn, iter)
- paramRepo:export(global_conf.param_fn .. '.' .. tostring(iter), nil)
+ global_conf.paramRepo:export(global_conf.param_fn .. '.' .. tostring(iter), nil)
else
printf("%s PPL did not improve, rejected, copying param file of last iter...\n", global_conf.sche_log_pre)
os.execute('cp ' .. global_conf.param_fn..'.'..tostring(iter - 1) .. ' ' .. global_conf.param_fn..'.'..tostring(iter))
@@ -358,6 +362,6 @@ end
printf("\n")
printf("===FINAL TEST===\n")
global_conf.sche_log_pre = "[SCHEDULER FINAL_TEST]:"
-tnn, paramRepo = load_net(global_conf, final_iter)
+tnn = load_net(global_conf, final_iter)
LMTrainer.lm_process_file(global_conf, global_conf.test_fn, tnn, false) --false update!
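
Editor's note: the diff above moves the parameter repository onto the shared global_conf table, so prepare_parameters() and load_net() no longer return a paramRepo and callers stop threading it through. The following framework-free Lua sketch only illustrates that pattern; the names Repo, make-style helpers, and conf below are illustrative stand-ins, not part of the nerv API.

-- Minimal sketch of the refactor: the repo lives on the config table
-- instead of being passed around via return values. Pure Lua, no nerv.
local Repo = {}
Repo.__index = Repo
function Repo.new() return setmetatable({params = {}}, Repo) end
function Repo:add(name, value) self.params[name] = value end
function Repo:get(name) return self.params[name] end

local conf = {hidden_size = 20}

-- before: prepare_parameters returned the repo and every caller passed it on
-- after:  prepare_parameters fills conf.paramRepo and returns nil
local function prepare_parameters(conf)
    conf.paramRepo = Repo.new()
    conf.paramRepo:add("ltp_ih", {rows = conf.hidden_size})
    return nil
end

local function prepare_layers(conf)
    -- layers now look the repo up on the config instead of taking it as an argument
    local paramRepo = conf.paramRepo
    return {ih = paramRepo:get("ltp_ih")}
end

prepare_parameters(conf)
local layers = prepare_layers(conf)
print(layers.ih.rows)  -- prints 20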