author | txh18 <[email protected]> | 2015-12-04 14:58:17 +0800
---|---|---
committer | txh18 <[email protected]> | 2015-12-04 14:58:17 +0800
commit | 618450eb71817ded45c422f35d8fede2d52a66b2 (patch) |
tree | faab52eb3f6507331703b656c62a9e2ebf3b3f92 |
parent | 39815c1faccbc64221579a4e13d193d64e68897b (diff) |
added log_redirect to SUtil
-rw-r--r-- | nerv/examples/lmptb/lm_trainer.lua | 14
-rw-r--r-- | nerv/examples/lmptb/lstmlm_ptb_main.lua | 89
-rw-r--r-- | nerv/examples/lmptb/m-tests/sutil_test.lua | 3
-rw-r--r-- | nerv/tnn/sutil.lua | 15
4 files changed, 73 insertions, 48 deletions
diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua
index e5384b1..9ef4794 100644
--- a/nerv/examples/lmptb/lm_trainer.lua
+++ b/nerv/examples/lmptb/lm_trainer.lua
@@ -7,7 +7,7 @@ require 'lmptb.lmseqreader'
 local LMTrainer = nerv.class('nerv.LMTrainer')
-local printf = nerv.printf
+--local printf = nerv.printf
 --The bias param update in nerv don't have wcost added
 function nerv.BiasParam:update_by_gradient(gradient)
@@ -87,10 +87,10 @@ function LMTrainer.lm_process_file_rnn(global_conf, fn, tnn, do_train)
         --print log
         if result["rnn"].cn_w > next_log_wcn then
             next_log_wcn = next_log_wcn + global_conf.log_w_num
-            printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
-            printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"))
+            nerv.printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
+            nerv.printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"))
             for key, value in pairs(global_conf.timer.rec) do
-                printf("\t [global_conf.timer]: time spent on %s:%.5f clock time\n", key, value)
+                nerv.printf("\t [global_conf.timer]: time spent on %s:%.5f clock time\n", key, value)
             end
             global_conf.timer:flush()
             nerv.LMUtil.wait(0.1)
@@ -108,9 +108,9 @@ function LMTrainer.lm_process_file_rnn(global_conf, fn, tnn, do_train)
         --break --debug
     end
-    printf("%s Displaying result:\n", global_conf.sche_log_pre)
-    printf("%s %s\n", global_conf.sche_log_pre, result:status("rnn"))
-    printf("%s Doing on %s end.\n", global_conf.sche_log_pre, fn)
+    nerv.printf("%s Displaying result:\n", global_conf.sche_log_pre)
+    nerv.printf("%s %s\n", global_conf.sche_log_pre, result:status("rnn"))
+    nerv.printf("%s Doing on %s end.\n", global_conf.sche_log_pre, fn)
     return result
 end
diff --git a/nerv/examples/lmptb/lstmlm_ptb_main.lua b/nerv/examples/lmptb/lstmlm_ptb_main.lua
index a49e5c2..681c308 100644
--- a/nerv/examples/lmptb/lstmlm_ptb_main.lua
+++ b/nerv/examples/lmptb/lstmlm_ptb_main.lua
@@ -7,7 +7,7 @@ require 'lmptb.lmseqreader'
 require 'lm_trainer'
 --[[global function rename]]--
-local printf = nerv.printf
+--local printf = nerv.printf
 local LMTrainer = nerv.LMTrainer
 --[[global function rename ends]]--
@@ -15,13 +15,13 @@ local LMTrainer = nerv.LMTrainer
 --first_time: bool
 --Returns: a ParamRepo
 function prepare_parameters(global_conf, iter)
-    printf("%s preparing parameters...\n", global_conf.sche_log_pre)
+    nerv.printf("%s preparing parameters...\n", global_conf.sche_log_pre)
     global_conf.paramRepo = nerv.ParamRepo()
     local paramRepo = global_conf.paramRepo
     if iter == -1 then --first time
-        printf("%s first time, prepare some pre-set parameters, and leaving other parameters to auto-generation...\n", global_conf.sche_log_pre)
+        nerv.printf("%s first time, prepare some pre-set parameters, and leaving other parameters to auto-generation...\n", global_conf.sche_log_pre)
         local f = nerv.ChunkFile(global_conf.param_fn .. '.0', 'w')
         f:close()
         --[[
@@ -56,10 +56,10 @@ function prepare_parameters(global_conf, iter)
         return nil
     end
-    printf("%s loading parameter from file %s...\n", global_conf.sche_log_pre, global_conf.param_fn .. '.' .. tostring(iter))
+    nerv.printf("%s loading parameter from file %s...\n", global_conf.sche_log_pre, global_conf.param_fn .. '.' .. tostring(iter))
     paramRepo:import({global_conf.param_fn .. '.' .. tostring(iter)}, nil, global_conf)
-    printf("%s preparing parameters end.\n", global_conf.sche_log_pre)
+    nerv.printf("%s preparing parameters end.\n", global_conf.sche_log_pre)
     return nil
 end
@@ -67,7 +67,7 @@ end
 --global_conf: table
 --Returns: nerv.LayerRepo
 function prepare_layers(global_conf)
-    printf("%s preparing layers...\n", global_conf.sche_log_pre)
+    nerv.printf("%s preparing layers...\n", global_conf.sche_log_pre)
     local pr = global_conf.paramRepo
@@ -125,7 +125,7 @@ function prepare_layers(global_conf)
     --]]
     local layerRepo = nerv.LayerRepo(layers, pr, global_conf)
-    printf("%s preparing layers end.\n", global_conf.sche_log_pre)
+    nerv.printf("%s preparing layers end.\n", global_conf.sche_log_pre)
     return layerRepo
 end
@@ -133,7 +133,7 @@ end
 --layerRepo: nerv.LayerRepo
 --Returns: a nerv.TNN
 function prepare_tnn(global_conf, layerRepo)
-    printf("%s Generate and initing TNN ...\n", global_conf.sche_log_pre)
+    nerv.printf("%s Generate and initing TNN ...\n", global_conf.sche_log_pre)
     --input: input_w, input_w, ... input_w_now, last_activation
     local connections_t = {
@@ -178,7 +178,7 @@ function prepare_tnn(global_conf, layerRepo)
     tnn:init(global_conf.batch_size, global_conf.chunk_size)
-    printf("%s Initing TNN end.\n", global_conf.sche_log_pre)
+    nerv.printf("%s Initing TNN end.\n", global_conf.sche_log_pre)
     return tnn
 end
@@ -202,7 +202,7 @@ test_fn = data_dir .. '/ptb.test.txt.adds'
 vocab_fn = data_dir .. '/vocab'
 global_conf = {
-    lrate = 0.15, wcost = 1e-5, momentum = 0, clip_t = 10,
+    lrate = 0.15, wcost = 1e-5, momentum = 0, clip_t = 2,
     cumat_type = nerv.CuMatrixFloat,
     mmat_type = nerv.MMatrixFloat,
     nn_act_default = 0,
@@ -214,7 +214,7 @@ global_conf = {
     max_iter = 45,
     lr_decay = 1.003,
    decay_iter = 10,
-    param_random = function() return (math.random() / 50 - 0.01) end,
+    param_random = function() return (math.random() / 5 - 0.1) end,
     dropout_str = "0.5",
     train_fn = train_fn,
@@ -303,12 +303,12 @@ local commands_str = "train:test"
 local commands = {}
 local test_iter = -1
-if (arg[2] ~= nil) then
-    printf("%s applying arg[2](%s)...\n", global_conf.sche_log_pre, arg[2])
+if arg[2] ~= nil then
+    nerv.printf("%s applying arg[2](%s)...\n", global_conf.sche_log_pre, arg[2])
     loadstring(arg[2])()
     nerv.LMUtil.wait(0.5)
 else
-    printf("%s no user setting, all default...\n", global_conf.sche_log_pre)
+    nerv.printf("%s no user setting, all default...\n", global_conf.sche_log_pre)
 end
 global_conf.work_dir = global_conf.work_dir_base .. 'h' .. global_conf.hidden_size .. 'l' .. global_conf.layer_num --.. 'ch' .. global_conf.chunk_size .. 'ba' .. global_conf.batch_size .. 'slr' .. global_conf.lrate .. 'wc' .. global_conf.wcost
@@ -316,32 +316,39 @@ global_conf.train_fn_shuf = global_conf.work_dir .. '/train_fn_shuf'
 global_conf.train_fn_shuf_bak = global_conf.train_fn_shuf .. '_bak'
 global_conf.param_fn = global_conf.work_dir .. "/params"
 global_conf.dropout_list = nerv.SUtil.parse_schedule(global_conf.dropout_str)
+global_conf.log_fn = global_conf.work_dir .. '/lstm_tnn_' .. commands_str .. '_log'
 commands = nerv.SUtil.parse_commands_set(commands_str)
+
+nerv.printf("%s creating work_dir...\n", global_conf.sche_log_pre)
+nerv.LMUtil.wait(1)
+os.execute("mkdir -p "..global_conf.work_dir)
+os.execute("cp " .. global_conf.train_fn .. " " .. global_conf.train_fn_shuf)
+
+--redirecting log outputs!
+nerv.SUtil.log_redirect(global_conf.log_fn)
+
 ----------------printing options---------------------------------
-printf("%s printing global_conf...\n", global_conf.sche_log_pre)
+nerv.printf("%s printing global_conf...\n", global_conf.sche_log_pre)
 for id, value in pairs(global_conf) do
-    print(id, value)
+    nerv.printf("%s:\t%s\n", id, tostring(value))
 end
 nerv.LMUtil.wait(2)
-printf("%s printing training scheduling options...\n", global_conf.sche_log_pre)
-print("lr_half", lr_half)
-print("start_iter", start_iter)
-print("ppl_last", ppl_last)
-print("commds_str", commands_str)
-print("test_iter", test_iter)
-printf("%s printing training scheduling end.\n", global_conf.sche_log_pre)
+
+nerv.printf("%s printing training scheduling options...\n", global_conf.sche_log_pre)
+nerv.printf("lr_half:%s\n", tostring(lr_half))
+nerv.printf("start_iter:%s\n", tostring(start_iter))
+nerv.printf("ppl_last:%s\n", tostring(ppl_last))
+nerv.printf("commds_str:%s\n", commands_str)
+nerv.printf("test_iter:%s\n", tostring(test_iter))
+nerv.printf("%s printing training scheduling end.\n", global_conf.sche_log_pre)
 nerv.LMUtil.wait(2)
 ------------------printing options end------------------------------
 math.randomseed(1)
-printf("%s creating work_dir...\n", global_conf.sche_log_pre)
-os.execute("mkdir -p "..global_conf.work_dir)
-os.execute("cp " .. global_conf.train_fn .. " " .. global_conf.train_fn_shuf)
-
 local vocab = nerv.LMVocab()
 global_conf["vocab"] = vocab
-printf("%s building vocab...\n", global_conf.sche_log_pre)
+nerv.printf("%s building vocab...\n", global_conf.sche_log_pre)
 global_conf.vocab:build_file(global_conf.vocab_fn, false)
 ppl_rec = {}
@@ -352,7 +359,7 @@ if commands["train"] == 1 then
     end
     if start_iter == -1 or start_iter == 0 then
-        print("===INITIAL VALIDATION===")
+        nerv.printf("===INITIAL VALIDATION===\n")
         local tnn = load_net(global_conf, 0)
         global_conf.paramRepo = tnn:get_params() --get auto-generted params
         global_conf.paramRepo:export(global_conf.param_fn .. '.0', nil) --some parameters are auto-generated, saved again to param.0 file
@@ -368,27 +375,27 @@ if commands["train"] == 1 then
         start_iter = 1
-        print()
+        nerv.printf("\n")
     end
     for iter = start_iter, global_conf.max_iter, 1 do
         final_iter = iter --for final testing
         global_conf.sche_log_pre = "[SCHEDULER ITER"..iter.." LR"..global_conf.lrate.."]:"
         tnn = load_net(global_conf, iter - 1)
-        printf("===ITERATION %d LR %f===\n", iter, global_conf.lrate)
+        nerv.printf("===ITERATION %d LR %f===\n", iter, global_conf.lrate)
         global_conf.dropout_rate = nerv.SUtil.sche_get(global_conf.dropout_list, iter)
         result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.train_fn_shuf, tnn, true) --true update!
         global_conf.dropout_rate = 0
         ppl_rec[iter] = {}
         ppl_rec[iter].train = result:ppl_all("rnn")
         --shuffling training file
-        printf("%s shuffling training file\n", global_conf.sche_log_pre)
+        nerv.printf("%s shuffling training file\n", global_conf.sche_log_pre)
         os.execute('cp ' .. global_conf.train_fn_shuf .. ' ' .. global_conf.train_fn_shuf_bak)
         os.execute('cat ' .. global_conf.train_fn_shuf_bak .. ' | sort -R --random-source=/dev/zero > ' .. global_conf.train_fn_shuf)
-        printf("===PEEK ON TEST %d===\n", iter)
+        nerv.printf("===PEEK ON TEST %d===\n", iter)
         result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.test_fn, tnn, false) --false update!
         ppl_rec[iter].test = result:ppl_all("rnn")
-        printf("===VALIDATION %d===\n", iter)
+        nerv.printf("===VALIDATION %d===\n", iter)
         result = LMTrainer.lm_process_file_rnn(global_conf, global_conf.valid_fn, tnn, false) --false update!
         ppl_rec[iter].valid = result:ppl_all("rnn")
         ppl_rec[iter].lr = global_conf.lrate
@@ -396,10 +403,10 @@ if commands["train"] == 1 then
             global_conf.lrate = (global_conf.lrate * 0.6)
         end
         if ppl_rec[iter].valid < ppl_last then
-            printf("%s PPL improves, saving net to file %s.%d...\n", global_conf.sche_log_pre, global_conf.param_fn, iter)
+            nerv.printf("%s PPL improves, saving net to file %s.%d...\n", global_conf.sche_log_pre, global_conf.param_fn, iter)
             global_conf.paramRepo:export(global_conf.param_fn .. '.' .. tostring(iter), nil)
         else
-            printf("%s PPL did not improve, rejected, copying param file of last iter...\n", global_conf.sche_log_pre)
+            nerv.printf("%s PPL did not improve, rejected, copying param file of last iter...\n", global_conf.sche_log_pre)
             os.execute('cp ' .. global_conf.param_fn..'.'..tostring(iter - 1) .. ' ' .. global_conf.param_fn..'.'..tostring(iter))
         end
         if ppl_last / ppl_rec[iter].valid < global_conf.lr_decay or lr_half == true then
@@ -408,21 +415,21 @@ if commands["train"] == 1 then
         if ppl_rec[iter].valid < ppl_last then
             ppl_last = ppl_rec[iter].valid
         end
-        printf("\n")
+        nerv.printf("\n")
         nerv.LMUtil.wait(2)
     end
     nerv.info("saving final nn to param.final")
     os.execute('cp ' .. global_conf.param_fn .. '.' .. tostring(final_iter) .. ' ' .. global_conf.param_fn .. '.final')
-    printf("===VALIDATION PPL record===\n")
+    nerv.printf("===VALIDATION PPL record===\n")
     for i, _ in pairs(ppl_rec) do
-        printf("<ITER%d LR%.5f train:%.3f valid:%.3f test:%.3f> \n", i, ppl_rec[i].lr, ppl_rec[i].train, ppl_rec[i].valid, ppl_rec[i].test)
+        nerv.printf("<ITER%d LR%.5f train:%.3f valid:%.3f test:%.3f> \n", i, ppl_rec[i].lr, ppl_rec[i].train, ppl_rec[i].valid, ppl_rec[i].test)
     end
-    printf("\n")
+    nerv.printf("\n")
 end --if commands["train"]
 if commands["test"] == 1 then
-    printf("===FINAL TEST===\n")
+    nerv.printf("===FINAL TEST===\n")
     global_conf.sche_log_pre = "[SCHEDULER FINAL_TEST]:"
     if final_iter ~= -1 and test_iter == -1 then
         test_iter = final_iter
diff --git a/nerv/examples/lmptb/m-tests/sutil_test.lua b/nerv/examples/lmptb/m-tests/sutil_test.lua
index 95660d9..08a812c 100644
--- a/nerv/examples/lmptb/m-tests/sutil_test.lua
+++ b/nerv/examples/lmptb/m-tests/sutil_test.lua
@@ -10,3 +10,6 @@ for p, v in pairs(coms) do
 end
 nerv.sss = "sss"
 print(nerv.sss)
+
+fh = assert(io.open("/home/slhome/txh18/workspace/nerv/play/try", "w"))
+fh:write("!!!2")
diff --git a/nerv/tnn/sutil.lua b/nerv/tnn/sutil.lua
index d88bd8e..78f88c0 100644
--- a/nerv/tnn/sutil.lua
+++ b/nerv/tnn/sutil.lua
@@ -62,3 +62,18 @@ function Util.parse_commands_set(str)
     end
     return coms
 end
+
+function Util.log_redirect(fn)
+    nerv.log_fh = assert(io.open(fn, "w"))
+    nerv.info("CAUTION[LOG_REDIRECT], all nerv.printf/info/warning/error calls will be double-written to %s", fn)
+    nerv.printf =
+        function (fmt, ...)
+            io.write(nerv.sprintf(fmt, ...))
+            nerv.log_fh:write(nerv.sprintf(fmt, ...))
+        end
+    nerv.error =
+        function (fmt, ...)
+            nerv.log_fh:write(nerv.sprintf("[nerv] internal error:" .. fmt .. "\n", ...))
+            error(nerv.sprintf("[nerv] internal error: " .. fmt .. "\n", ...))
+        end
+end
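For reference, the new `nerv.SUtil.log_redirect` (added to `nerv/tnn/sutil.lua` above) opens the log file and then replaces `nerv.printf` and `nerv.error` so that every message is written both to stdout and to the log file; the training script turns it on with `nerv.SUtil.log_redirect(global_conf.log_fn)` right after the work directory is created. Below is a minimal standalone sketch of the same double-write idea in plain Lua, not the library code itself: the `log` table, `log.redirect`, and the file name `train.log` are illustrative stand-ins, and `string.format` takes the place of `nerv.sprintf`.

```lua
-- Minimal sketch of log redirection by wrapping a printf-style function.
local log = {}

-- default printf: stdout only
function log.printf(fmt, ...)
    io.write(string.format(fmt, ...))
end

-- after redirect, every printf goes to stdout and to the log file
function log.redirect(fn)
    local fh = assert(io.open(fn, "w"))
    log.printf = function(fmt, ...)
        local msg = string.format(fmt, ...) -- format once, write twice
        io.write(msg)
        fh:write(msg)
    end
end

-- usage: messages now appear on the console and in train.log
log.redirect("train.log")
log.printf("%s %d words processed\n", "[TRAIN]:", 1000)
```

One small difference from the committed version: the sketch formats the message once and reuses it, whereas the `nerv.printf` wrapper in the diff calls `nerv.sprintf` twice per message.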