author    | txh18 <cloudygooseg@gmail.com> | 2015-11-06 19:57:46 +0800
committer | txh18 <cloudygooseg@gmail.com> | 2015-11-06 19:57:46 +0800
commit    | ae4e5218cd96e3888b7eaa90412b2279d14337f3 (patch)
tree      | 05f030ef5b44de3c3dbf7e34f24b7279492a085b /nerv/examples/lmptb/m-tests
parent    | 26db912e38c3446961831d17be6b4508ec508bca (diff)
first small tnn test seems to work
Diffstat (limited to 'nerv/examples/lmptb/m-tests')
-rw-r--r-- | nerv/examples/lmptb/m-tests/dagl_test.lua | 180
-rw-r--r-- | nerv/examples/lmptb/m-tests/some-text     |   2
2 files changed, 1 insertion, 181 deletions
diff --git a/nerv/examples/lmptb/m-tests/dagl_test.lua b/nerv/examples/lmptb/m-tests/dagl_test.lua
deleted file mode 100644
index 6bd11c8..0000000
--- a/nerv/examples/lmptb/m-tests/dagl_test.lua
+++ /dev/null
@@ -1,180 +0,0 @@
-require 'lmptb.lmvocab'
-require 'lmptb.lmfeeder'
-require 'lmptb.lmutil'
-require 'lmptb.layer.init'
-require 'lmptb.lmseqreader'
-require 'rnn.tnn'
-
---[[global function rename]]--
-printf = nerv.printf
---[[global function rename ends]]--
-
---global_conf: table
---first_time: bool
---Returns: a ParamRepo
-function prepare_parameters(global_conf, first_time)
-    printf("%s preparing parameters...\n", global_conf.sche_log_pre)
-
-    if (first_time) then
-        ltp_ih = nerv.LinearTransParam("ltp_ih", global_conf)
-        ltp_ih.trans = global_conf.cumat_type(global_conf.vocab:size() + 1, global_conf.hidden_size) --index 0 is for zero, others correspond to vocab index(starting from 1)
-        ltp_ih.trans:generate(global_conf.param_random)
-        ltp_ih.trans[0]:fill(0)
-
-        ltp_hh = nerv.LinearTransParam("ltp_hh", global_conf)
-        ltp_hh.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.hidden_size)
-        ltp_hh.trans:generate(global_conf.param_random)
-
-        ltp_ho = nerv.LinearTransParam("ltp_ho", global_conf)
-        ltp_ho.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.vocab:size())
-        ltp_ho.trans:generate(global_conf.param_random)
-
-        bp_h = nerv.BiasParam("bp_h", global_conf)
-        bp_h.trans = global_conf.cumat_type(1, global_conf.hidden_size)
-        bp_h.trans:generate(global_conf.param_random)
-
-        bp_o = nerv.BiasParam("bp_o", global_conf)
-        bp_o.trans = global_conf.cumat_type(1, global_conf.vocab:size())
-        bp_o.trans:generate(global_conf.param_random)
-
-        local f = nerv.ChunkFile(global_conf.param_fn, 'w')
-        f:write_chunk(ltp_ih)
-        f:write_chunk(ltp_hh)
-        f:write_chunk(ltp_ho)
-        f:write_chunk(bp_h)
-        f:write_chunk(bp_o)
-        f:close()
-    end
-
-    local paramRepo = nerv.ParamRepo()
-    paramRepo:import({global_conf.param_fn}, nil, global_conf)
-
-    printf("%s preparing parameters end.\n", global_conf.sche_log_pre)
-
-    return paramRepo
-end
-
---global_conf: table
---Returns: nerv.LayerRepo
-function prepare_layers(global_conf, paramRepo)
-    printf("%s preparing layers...\n", global_conf.sche_log_pre)
-
-    local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}}
-
-    local layers = {
-        ["nerv.IndRecurrentLayer"] = {
-            ["recurrentL1"] = recurrentLconfig,
-        },
-
-        ["nerv.SelectLinearLayer"] = {
-            ["selectL1"] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}},
-        },
-
-        ["nerv.SigmoidLayer"] = {
-            ["sigmoidL1"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}}
-        },
-
-        ["nerv.AffineLayer"] = {
-            ["outputL"] = {{["ltp"] = "ltp_ho", ["bp"] = "bp_o"}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}}},
-        },
-
-        ["nerv.SoftmaxCELayer"] = {
-            ["softmaxL"] = {{}, {["dim_in"] = {global_conf.vocab:size(), global_conf.vocab:size()}, ["dim_out"] = {1}}},
-        },
-    }
-
-    --[[ --we do not need those in the new rnn framework
-    printf("%s adding %d bptt layers...\n", global_conf.sche_log_pre, global_conf.bptt)
-    for i = 1, global_conf.bptt do
-        layers["nerv.IndRecurrentLayer"]["recurrentL" .. (i + 1)] = recurrentLconfig
-        layers["nerv.SigmoidLayer"]["sigmoidL" .. (i + 1)] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}}
-        layers["nerv.SelectLinearLayer"]["selectL" .. (i + 1)] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}}
-    end
-    --]]
-
-    local layerRepo = nerv.LayerRepo(layers, paramRepo, global_conf)
-    printf("%s preparing layers end.\n", global_conf.sche_log_pre)
-    return layerRepo
-end
-
---global_conf: table
---layerRepo: nerv.LayerRepo
---Returns: a nerv.TNN
-function prepare_dagLayer(global_conf, layerRepo)
-    printf("%s Initing TNN ...\n", global_conf.sche_log_pre)
-
-    --input: input_w, input_w, ... input_w_now, last_activation
-    local connections_t = {
-        {"<input>[1]", "selectL1[1]", 0},
-        {"selectL1[1]", "recurrentL1[1]", 0},
-        {"recurrentL1[1]", "sigmoidL1[1]", 0},
-        {"sigmoidL1[1]", "outputL[1]", 0},
-        {"sigmoidL1[1]", "recurrentL1[2]", 1},
-        {"outputL[1]", "softmaxL[1]", 0},
-        {"<input>[2]", "softmaxL[2]", 0},
-        {"softmaxL[1]", "<output>[1]", 0}
-    }
-
-    --[[
-    printf("%s printing DAG connections:\n", global_conf.sche_log_pre)
-    for key, value in pairs(connections_t) do
-        printf("\t%s->%s\n", key, value)
-    end
-    ]]--
-
-    local tnn = nerv.TNN("TNN", global_conf, {["dim_in"] = {1, global_conf.vocab:size()}, ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
-            ["connections"] = connections_t,
-    })
-    printf("%s Initing TNN end.\n", global_conf.sche_log_pre)
-    return tnn
-end
-
-local train_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
-local test_fn = '/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/m-tests/some-text'
-
-local global_conf = {
-    lrate = 1, wcost = 1e-6, momentum = 0,
-    cumat_type = nerv.CuMatrixFloat,
-    mmat_type = nerv.CuMatrixFloat,
-    nn_act_default = 0,
-
-    hidden_size = 20,
-    chunk_size = 5,
-    batch_size = 3,
-    max_iter = 18,
-    param_random = function() return (math.random() / 5 - 0.1) end,
-    independent = true,
-
-    train_fn = train_fn,
-    test_fn = test_fn,
-    sche_log_pre = "[SCHEDULER]:",
-    log_w_num = 10, --give a message when log_w_num words have been processed
-    timer = nerv.Timer()
-}
-global_conf.work_dir = '/home/slhome/txh18/workspace/nerv/play/dagL_test'
-global_conf.param_fn = global_conf.work_dir.."/params"
-
-local vocab = nerv.LMVocab()
-global_conf["vocab"] = vocab
-global_conf.vocab:build_file(global_conf.train_fn, false)
-local paramRepo = prepare_parameters(global_conf, true)
-local layerRepo = prepare_layers(global_conf, paramRepo)
-local tnn = prepare_dagLayer(global_conf, layerRepo)
-tnn:init(global_conf.batch_size, global_conf.chunk_size)
-
-local reader = nerv.LMSeqReader(global_conf, global_conf.batch_size, global_conf.chunk_size, global_conf.vocab)
-reader:open_file(global_conf.train_fn)
-
-local batch_num = 1
-while (1) do
-    local r, feeds
-    r, feeds = tnn:getFeedFromReader(reader)
-    if (r == false) then break end
-    for j = 1, global_conf.chunk_size, 1 do
-        for i = 1, global_conf.batch_size, 1 do
-            printf("%s[L(%s)] ", feeds.inputs_s[j][i], feeds.labels_s[j][i]) --vocab:get_word_str(input[i][j]).id
-        end
-        printf("\n")
-    end
-    printf("\n")
-end
diff --git a/nerv/examples/lmptb/m-tests/some-text b/nerv/examples/lmptb/m-tests/some-text
index cdfbd2c..da4bea9 100644
--- a/nerv/examples/lmptb/m-tests/some-text
+++ b/nerv/examples/lmptb/m-tests/some-text
@@ -1,6 +1,6 @@
 </s> aa bb cc aa bb cc aa bb cc aa bb cc aa bb cc aa </s>
 </s> aa bb cc aa bb cc aa bb cc aa </s>
-</s> aa bb cc aa bb cc aa bb cc aa </s>
+</s> bb cc aa bb cc aa bb cc aa </s>
 </s> aa bb cc aa </s>
 </s> aa bb cc aa </s>
 </s> aa bb cc aa </s>