author     txh18 <[email protected]>    2015-12-07 15:57:16 +0800
committer  txh18 <[email protected]>    2015-12-07 15:57:16 +0800
commit     775d35373980b357ae77c9694f71d98e8ef103d6
tree       a1a52583bdd1480b3112eec9c091db7d17c80b48
parent     d2a69f2660b6a7a724605cce4d88fbbbdeec2f68
small bug fixes
-rw-r--r--  nerv/examples/lmptb/bilstmlm_ptb_main.lua  | 15
-rw-r--r--  nerv/examples/lmptb/lmptb/lmseqreader.lua  |  2
-rw-r--r--  nerv/tnn/sutil.lua                         |  1
3 files changed, 8 insertions, 10 deletions
diff --git a/nerv/examples/lmptb/bilstmlm_ptb_main.lua b/nerv/examples/lmptb/bilstmlm_ptb_main.lua
index 45a724b..b896bed 100644
--- a/nerv/examples/lmptb/bilstmlm_ptb_main.lua
+++ b/nerv/examples/lmptb/bilstmlm_ptb_main.lua
@@ -73,9 +73,6 @@ function prepare_layers(global_conf)
 
     local du = false
 
-    --local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}}
-    --local recurrentLconfig = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du, ["pr"] = pr}}
-
     local layers = {
         ["nerv.LSTMLayerT"] = {
             ["lstmFL1"] = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size, global_conf.hidden_size}, ["pr"] = pr}},
@@ -97,7 +94,7 @@ function prepare_layers(global_conf)
         },
 
         ["nerv.AffineLayer"] = {
-            ["biAffineL1"] = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["lambda"] = {1, 1}}},
+            ["biAffineL1"] = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["pr"] = pr, ["lambda"] = {1, 1}}},
             ["outputL"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du, ["pr"] = pr}},
         },
@@ -312,10 +309,10 @@ global_conf = {
 
 else
 
-valid_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
-train_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
-test_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
-vocab_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text-chn'
+valid_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
+train_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
+test_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
+vocab_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
 
 global_conf = {
     lrate = 0.01, wcost = 1e-5, momentum = 0,
@@ -327,7 +324,7 @@ global_conf = {
     layer_num = 1,
     chunk_size = 20,
     batch_size = 10,
-    max_iter = 3,
+    max_iter = 2,
     param_random = function() return (math.random() / 5 - 0.1) end,
 
     dropout_str = "0",
diff --git a/nerv/examples/lmptb/lmptb/lmseqreader.lua b/nerv/examples/lmptb/lmptb/lmseqreader.lua
index 40471d5..ed791d2 100644
--- a/nerv/examples/lmptb/lmptb/lmseqreader.lua
+++ b/nerv/examples/lmptb/lmptb/lmseqreader.lua
@@ -179,7 +179,7 @@ function LMReader:get_batch(feeds)
 
     if got_new == false then
         nerv.info("lmseqreader file ends, printing stats...")
-        print("al_sen_start:", self.stat.al_sen_start)
+        nerv.printf("al_sen_start:%s\n", tostring(self.stat.al_sen_start))
         return false
     else
diff --git a/nerv/tnn/sutil.lua b/nerv/tnn/sutil.lua
index 78f88c0..6a968b7 100644
--- a/nerv/tnn/sutil.lua
+++ b/nerv/tnn/sutil.lua
@@ -70,6 +70,7 @@ function Util.log_redirect(fn)
         function (fmt, ...)
             io.write(nerv.sprintf(fmt, ...))
             nerv.log_fh:write(nerv.sprintf(fmt, ...))
+            nerv.log_fh:flush()
         end
     nerv.error = function (fmt, ...)
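
A note on the lmseqreader.lua hunk: print() writes straight to stdout and bypasses the log redirection that Util.log_redirect installs, so the stat line never reached the log file; nerv.printf goes through the redirected writer. The tostring() wrapper also matters because in Lua 5.1 string.format("%s", v) raises an error when v is not a string or a number, while tostring(v) always yields a string. A minimal standalone sketch of the guarded formatting, in plain Lua with no NERV dependencies (the stat table here is made up for illustration):

-- Hypothetical stat table for illustration; the field may legitimately be nil.
local stat = { al_sen_start = nil }

-- print() tolerates nil but writes only to stdout, bypassing any redirection:
print("al_sen_start:", stat.al_sen_start)

-- Formatted logging needs an explicit conversion: in Lua 5.1,
-- string.format("%s", nil) raises "bad argument", so wrap in tostring().
io.write(string.format("al_sen_start:%s\n", tostring(stat.al_sen_start)))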
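And on the sutil.lua hunk: Lua's io library buffers file writes, so lines written to nerv.log_fh could sit in the buffer and vanish if training crashed or was killed; flushing after every write trades a little I/O for a log that is always current on disk. A minimal sketch of the same tee-and-flush pattern outside NERV (open_tee_logger is a hypothetical helper name, not a NERV API):

-- Tee formatted output to stdout and a file, flushing the file handle
-- after every write so nothing is lost if the process dies mid-run.
local function open_tee_logger(path)
    local fh = assert(io.open(path, "a"))
    return function (fmt, ...)
        local msg = string.format(fmt, ...)
        io.write(msg)
        fh:write(msg)
        fh:flush()  -- same fix as the nerv.log_fh:flush() added in sutil.lua
    end
end

local log = open_tee_logger("train.log")
log("al_sen_start:%s\n", tostring(42))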