author     cloudygoose <[email protected]>  2015-06-21 10:50:46 +0800
committer  cloudygoose <[email protected]>  2015-06-21 10:50:46 +0800
commit     ad2c4013dec71667d6bbd5e9a249df2085208507 (patch)
tree       474c536ff4a7d757bb61915a1bfc7a073f40d09f /examples
parent     839d938df0d83ec311c5d1299923c667adff6a87 (diff)
parent     f3f4e74eb4dbb8829e5ee136ba4b0c0a7938b551 (diff)
Merge upstream 'parameter update big-change'.
Merge remote-tracking branch 'upstream/master'
Diffstat (limited to 'examples')
-rw-r--r--  examples/asr_trainer.lua      |  42
-rw-r--r--  examples/swb_baseline.lua     |  87
-rw-r--r--  examples/test_dnn_layers.lua  |   4
-rw-r--r--  examples/test_nn_lib.lua      |  18
4 files changed, 81 insertions, 70 deletions
diff --git a/examples/asr_trainer.lua b/examples/asr_trainer.lua
index 05d770f..a5727be 100644
--- a/examples/asr_trainer.lua
+++ b/examples/asr_trainer.lua
@@ -1,50 +1,58 @@
function build_trainer(ifname)
- local param_repo = make_param_repo(ifname)
+ local param_repo = nerv.ParamRepo()
+ param_repo:import(ifname, nil, gconf)
local sublayer_repo = make_sublayer_repo(param_repo)
local layer_repo = make_layer_repo(sublayer_repo, param_repo)
local crit = get_criterion_layer(sublayer_repo)
local network = get_network(layer_repo)
+ local input_order = get_input_order()
local iterative_trainer = function (prefix, scp_file, bp)
gconf.randomize = bp
-- build buffer
- local buffer = make_buffer(make_reader(scp_file, layer_repo))
+ local buffer = make_buffer(make_readers(scp_file, layer_repo))
-- initialize the network
network:init(gconf.batch_size)
gconf.cnt = 0
+ err_input = {nerv.CuMatrixFloat(256, 1)}
+ err_input[1]:fill(1)
for data in buffer.get_data, buffer do
-- print stat periodically
gconf.cnt = gconf.cnt + 1
if gconf.cnt == 1000 then
- print_stat(crit)
+ print_stat(sublayer_repo)
+ nerv.CuMatrix.print_profile()
+ nerv.CuMatrix.clear_profile()
gconf.cnt = 0
+ -- break
end
+ local input = {}
-- if gconf.cnt == 100 then break end
-
- input = {data.main_scp, data.phone_state}
- output = {}
- err_input = {}
+ for i, id in ipairs(input_order) do
+ if data[id] == nil then
+ nerv.error("input data %s not found", id)
+ end
+ table.insert(input, data[id])
+ end
+ local output = {nerv.CuMatrixFloat(256, 1)}
err_output = {input[1]:create()}
network:propagate(input, output)
if bp then
- network:back_propagate(err_output, err_input, input, output)
+ network:back_propagate(err_input, err_output, input, output)
network:update(err_input, input, output)
end
-- collect garbage in-time to save GPU memory
collectgarbage("collect")
end
- print_stat(crit)
+ print_stat(sublayer_repo)
nerv.CuMatrix.print_profile()
+ nerv.CuMatrix.clear_profile()
if (not bp) and prefix ~= nil then
nerv.info("writing back...")
local fname = string.format("%s_cv%.3f.nerv",
- prefix, get_accuracy(crit))
- cf = nerv.ChunkFile(fname, "w")
- for i, p in ipairs(network:get_params()) do
- cf:write_chunk(p)
- end
- cf:close()
+ prefix, get_accuracy(sublayer_repo))
+ network:get_params():export(fname, nil)
end
- return get_accuracy(crit)
+ return get_accuracy(sublayer_repo)
end
return iterative_trainer
end
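
Note the signature fix in this hunk: back_propagate() now takes err_input before err_output, i.e. the error signal first and the gradient buffer second. A minimal sketch of the corrected call sequence, assembled from the lines above (the 256 matches the hard-coded minibatch rows in the patch):

-- err_input carries the initial error signal, all ones for CE training;
-- err_output receives the gradient w.r.t. the network input.
local err_input = {nerv.CuMatrixFloat(256, 1)}
err_input[1]:fill(1)
local output = {nerv.CuMatrixFloat(256, 1)}
local err_output = {input[1]:create()}
network:propagate(input, output)
network:back_propagate(err_input, err_output, input, output)
network:update(err_input, input, output)
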
@@ -73,7 +81,7 @@ for i = 1, max_iter do
local accu_new = trainer(
string.format("%s_%s_iter_%d_lr%f_tr%.3f",
string.gsub(
- (string.gsub(pf0, "(.*/)(.*)", "%2")),
+ (string.gsub(pf0[1], "(.*/)(.*)", "%2")),
"(.*)%..*", "%1"),
os.date("%Y%m%d%H%M%S"),
i, gconf.lrate,
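
Taken together, parameter I/O in the trainer is now delegated to ParamRepo: loading goes through import() on an empty repo, and saving goes through export() on the repo returned by get_params(). A condensed sketch of the new cycle, using the names from the hunks above:

-- Load: ifname is now a list of .nerv files (see initialized_param
-- in swb_baseline.lua below).
local param_repo = nerv.ParamRepo()
param_repo:import(ifname, nil, gconf)

-- Save: get_params() returns a ParamRepo, so the whole parameter set
-- is written in one export() call instead of a manual ChunkFile loop.
network:get_params():export(fname, nil)
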
diff --git a/examples/swb_baseline.lua b/examples/swb_baseline.lua
index 28cc6d5..8b7e01a 100644
--- a/examples/swb_baseline.lua
+++ b/examples/swb_baseline.lua
@@ -6,14 +6,10 @@ gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9,
tr_scp = "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
cv_scp = "/slfs1/users/mfy43/swb_ivec/train_cv.scp",
htk_conf = "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
- global_transf = "/slfs1/users/mfy43/swb_global_transf.nerv",
- initialized_param = "/slfs1/users/mfy43/swb_init.nerv",
+ initialized_param = {"/slfs1/users/mfy43/swb_init.nerv",
+ "/slfs1/users/mfy43/swb_global_transf.nerv"},
debug = false}
-function make_param_repo(param_file)
- return nerv.ParamRepo({param_file, gconf.global_transf})
-end
-
function make_sublayer_repo(param_repo)
return nerv.LayerRepo(
{
@@ -60,7 +56,7 @@ function make_sublayer_repo(param_repo)
},
["nerv.SoftmaxCELayer"] =
{
- criterion = {{}, {dim_in = {3001, 1}, dim_out = {}, compressed = true}}
+ ce_crit = {{}, {dim_in = {3001, 1}, dim_out = {1}, compressed = true}}
}
}, param_repo, gconf)
end
@@ -82,7 +78,7 @@ function make_layer_repo(sublayer_repo, param_repo)
}
}},
main = {{}, {
- dim_in = {429, 1}, dim_out = {},
+ dim_in = {429, 1}, dim_out = {1},
sub_layers = sublayer_repo,
connections = {
["<input>[1]"] = "affine0[1]",
@@ -100,8 +96,9 @@ function make_layer_repo(sublayer_repo, param_repo)
["sigmoid5[1]"] = "affine6[1]",
["affine6[1]"] = "sigmoid6[1]",
["sigmoid6[1]"] = "affine7[1]",
- ["affine7[1]"] = "criterion[1]",
- ["<input>[2]"] = "criterion[2]"
+ ["affine7[1]"] = "ce_crit[1]",
+ ["<input>[2]"] = "ce_crit[2]",
+ ["ce_crit[1]"] = "<output>[1]"
}
}}
}
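
With ce_crit now wired through to <output>[1] and the main network's dim_out changed from {} to {1}, the network produces the per-frame CE loss as a real output; this is why the trainer above had to start allocating an output buffer. A sketch of the caller's side (assuming, as the trainer does, a minibatch of 256 rows):

-- The main network now declares one output column (the CE loss),
-- so propagate() must be given a matching buffer:
local output = {nerv.CuMatrixFloat(256, 1)}
network:propagate(input, output)
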
@@ -109,55 +106,61 @@ function make_layer_repo(sublayer_repo, param_repo)
end
function get_criterion_layer(sublayer_repo)
- return sublayer_repo:get_layer("criterion")
+ return sublayer_repo:get_layer("ce_crit")
end
function get_network(layer_repo)
return layer_repo:get_layer("main")
end
-function make_reader(scp_file, layer_repo)
- return nerv.TNetReader(gconf,
- {
- id = "main_scp",
- scp_file = scp_file,
- conf_file = gconf.htk_conf,
- frm_ext = gconf.frm_ext,
- mlfs = {
- phone_state = {
- file = "/slfs1/users/mfy43/swb_ivec/ref.mlf",
- format = "map",
- format_arg = "/slfs1/users/mfy43/swb_ivec/dict",
- dir = "*/",
- ext = "lab"
- }
- },
- global_transf = layer_repo:get_layer("global_transf")
- })
+function make_readers(scp_file, layer_repo)
+ return {
+ {reader = nerv.TNetReader(gconf,
+ {
+ id = "main_scp",
+ scp_file = scp_file,
+ conf_file = gconf.htk_conf,
+ frm_ext = gconf.frm_ext,
+ mlfs = {
+ phone_state = {
+ file = "/slfs1/users/mfy43/swb_ivec/ref.mlf",
+ format = "map",
+ format_arg = "/slfs1/users/mfy43/swb_ivec/dict",
+ dir = "*/",
+ ext = "lab"
+ }
+ },
+ global_transf = layer_repo:get_layer("global_transf")
+ }),
+ data = {main_scp = 429, phone_state = 1}}
+ }
end
-function make_buffer(reader, buffer)
+function make_buffer(readers)
return nerv.SGDBuffer(gconf,
{
buffer_size = gconf.buffer_size,
randomize = gconf.randomize,
- readers = {
- { reader = reader,
- data = {main_scp = 429, phone_state = 1}}
- }
+ readers = readers
})
end
-function get_accuracy(crit)
- return crit.total_correct / crit.total_frames * 100
+function get_input_order()
+ return {"main_scp", "phone_state"}
+end
+
+function get_accuracy(sublayer_repo)
+ local ce_crit = sublayer_repo:get_layer("ce_crit")
+ return ce_crit.total_correct / ce_crit.total_frames * 100
end
-function print_stat(crit)
+function print_stat(sublayer_repo)
+ local ce_crit = sublayer_repo:get_layer("ce_crit")
nerv.info("*** training stat begin ***")
- nerv.utils.printf("cross entropy:\t\t%.8f\n", crit.total_ce)
- nerv.utils.printf("correct:\t\t%d\n", crit.total_correct)
- nerv.utils.printf("frames:\t\t\t%d\n", crit.total_frames)
- nerv.utils.printf("err/frm:\t\t%.8f\n", crit.total_ce / crit.total_frames)
- nerv.utils.printf("accuracy:\t\t%.3f%%\n", get_accuracy(crit))
+ nerv.printf("cross entropy:\t\t%.8f\n", ce_crit.total_ce)
+ nerv.printf("correct:\t\t%d\n", ce_crit.total_correct)
+ nerv.printf("frames:\t\t\t%d\n", ce_crit.total_frames)
+ nerv.printf("err/frm:\t\t%.8f\n", ce_crit.total_ce / ce_crit.total_frames)
+ nerv.printf("accuracy:\t\t%.3f%%\n", get_accuracy(sublayer_repo))
nerv.info("*** training stat end ***")
end
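
The reader plumbing is now list-based end to end: make_readers() returns a list of {reader, data} pairs, make_buffer() hands that list straight to the SGDBuffer, and get_input_order() fixes how the named slots are assembled into the network input. A minimal sketch wiring the three together (all names as defined above):

local readers = make_readers(gconf.tr_scp, layer_repo)
local buffer = make_buffer(readers)
for data in buffer.get_data, buffer do
    -- gather slots in the declared order, as the new trainer loop does
    local input = {}
    for i, id in ipairs(get_input_order()) do  -- {"main_scp", "phone_state"}
        table.insert(input, data[id])
    end
end
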
diff --git a/examples/test_dnn_layers.lua b/examples/test_dnn_layers.lua
index bf81f7b..64c0dec 100644
--- a/examples/test_dnn_layers.lua
+++ b/examples/test_dnn_layers.lua
@@ -69,8 +69,8 @@ for i = 0, 3 do
print(err_output1[1])
print("err_output2")
print(err_output2[1])
- nerv.utils.printf("cross entropy: %.8f\n", sm.total_ce)
- nerv.utils.printf("frames: %.8f\n", sm.total_frames)
+ nerv.printf("cross entropy: %.8f\n", sm.total_ce)
+ nerv.printf("frames: %.8f\n", sm.total_frames)
end
print("linear")
print(af.ltp.trans)
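
The remaining churn in this file and the next is purely mechanical: nerv.utils.printf was renamed to nerv.printf upstream. If scripts outside this tree still call the old name, a one-line alias would bridge them (a hypothetical shim, not part of this commit):

-- Hypothetical compatibility shim: alias the old name to the new one.
nerv.utils = nerv.utils or {}
nerv.utils.printf = nerv.printf
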
diff --git a/examples/test_nn_lib.lua b/examples/test_nn_lib.lua
index 6fdbd67..5444810 100644
--- a/examples/test_nn_lib.lua
+++ b/examples/test_nn_lib.lua
@@ -144,17 +144,17 @@ for data in buffer.get_data, buffer do
main:back_propagate(err_output, err_input, input, output)
main:update(err_input, input, output)
--- nerv.utils.printf("cross entropy: %.8f\n", sm.total_ce)
--- nerv.utils.printf("correct: %d\n", sm.total_correct)
--- nerv.utils.printf("frames: %d\n", sm.total_frames)
--- nerv.utils.printf("err/frm: %.8f\n", sm.total_ce / sm.total_frames)
--- nerv.utils.printf("accuracy: %.8f\n", sm.total_correct / sm.total_frames)
+-- nerv.printf("cross entropy: %.8f\n", sm.total_ce)
+-- nerv.printf("correct: %d\n", sm.total_correct)
+-- nerv.printf("frames: %d\n", sm.total_frames)
+-- nerv.printf("err/frm: %.8f\n", sm.total_ce / sm.total_frames)
+-- nerv.printf("accuracy: %.8f\n", sm.total_correct / sm.total_frames)
collectgarbage("collect")
end
-nerv.utils.printf("cross entropy: %.8f\n", sm.total_ce)
-nerv.utils.printf("correct: %d\n", sm.total_correct)
-nerv.utils.printf("accuracy: %.3f%%\n", sm.total_correct / sm.total_frames * 100)
-nerv.utils.printf("writing back...\n")
+nerv.printf("cross entropy: %.8f\n", sm.total_ce)
+nerv.printf("correct: %d\n", sm.total_correct)
+nerv.printf("accuracy: %.3f%%\n", sm.total_correct / sm.total_frames * 100)
+nerv.printf("writing back...\n")
cf = nerv.ChunkFile("output.nerv", "w")
for i, p in ipairs(main:get_params()) do
print(p)