Diffstat (limited to 'nerv')
 m---------  nerv/doc/gh-pages                    |   0
 -rw-r--r--  nerv/examples/asr_trainer.lua        |  11
 -rw-r--r--  nerv/examples/swb_baseline.lua       |  68
 -rw-r--r--  nerv/examples/swb_baseline_basic.lua | 161
 -rw-r--r--  nerv/nn/layer_dag.lua                |   3
 -rw-r--r--  nerv/nn/layer_repo.lua               |   8
 6 files changed, 226 insertions(+), 25 deletions(-)
diff --git a/nerv/doc/gh-pages b/nerv/doc/gh-pages
new file mode 160000
+Subproject 195d95bb663258e3b1c3962f946db9c374018a8
diff --git a/nerv/examples/asr_trainer.lua b/nerv/examples/asr_trainer.lua
index 8dfb2ac..dcadfa3 100644
--- a/nerv/examples/asr_trainer.lua
+++ b/nerv/examples/asr_trainer.lua
@@ -1,8 +1,7 @@
function build_trainer(ifname)
local param_repo = nerv.ParamRepo()
param_repo:import(ifname, nil, gconf)
- local sublayer_repo = make_sublayer_repo(param_repo)
- local layer_repo = make_layer_repo(sublayer_repo, param_repo)
+ local layer_repo = make_layer_repo(param_repo)
local network = get_network(layer_repo)
local input_order = get_input_order()
local iterative_trainer = function (prefix, scp_file, bp)
@@ -18,7 +17,7 @@ function build_trainer(ifname)
-- print stat periodically
gconf.cnt = gconf.cnt + 1
if gconf.cnt == 1000 then
- print_stat(sublayer_repo)
+ print_stat(layer_repo)
nerv.CuMatrix.print_profile()
nerv.CuMatrix.clear_profile()
gconf.cnt = 0
@@ -42,16 +41,16 @@ function build_trainer(ifname)
-- collect garbage in time to save GPU memory
collectgarbage("collect")
end
- print_stat(sublayer_repo)
+ print_stat(layer_repo)
nerv.CuMatrix.print_profile()
nerv.CuMatrix.clear_profile()
if (not bp) and prefix ~= nil then
nerv.info("writing back...")
local fname = string.format("%s_cv%.3f.nerv",
- prefix, get_accuracy(sublayer_repo))
+ prefix, get_accuracy(layer_repo))
network:get_params():export(fname, nil)
end
- return get_accuracy(sublayer_repo)
+ return get_accuracy(layer_repo)
end
return iterative_trainer
end
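
With this change, asr_trainer.lua assumes each experiment config exposes a single make_layer_repo(param_repo) entry point and passes the merged repo to print_stat and get_accuracy. A minimal sketch of the contract a config now has to satisfy (primitive_spec and dag_spec are placeholders for the real layer specifications shown below):

    -- sketch only: one repo holds both primitive and DAG layers
    function make_layer_repo(param_repo)
        -- first pass: primitive layers (affine, sigmoid, ce, ...)
        local layer_repo = nerv.LayerRepo(primitive_spec, param_repo, gconf)
        -- second pass: DAG layers whose sub_layers resolve against the repo itself
        layer_repo:add_layers(dag_spec, param_repo, gconf)
        return layer_repo
    end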
diff --git a/nerv/examples/swb_baseline.lua b/nerv/examples/swb_baseline.lua
index 7783f2a..0e9f897 100644
--- a/nerv/examples/swb_baseline.lua
+++ b/nerv/examples/swb_baseline.lua
@@ -10,8 +10,8 @@ gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9,
"/slfs1/users/mfy43/swb_global_transf.nerv"},
debug = false}
-function make_sublayer_repo(param_repo)
- return nerv.LayerRepo(
+function make_layer_repo(param_repo)
+ local layer_repo = nerv.LayerRepo(
{
-- global transf
["nerv.BiasLayer"] =
@@ -54,21 +54,23 @@ function make_sublayer_repo(param_repo)
sigmoid5 = {{}, {dim_in = {2048}, dim_out = {2048}}},
sigmoid6 = {{}, {dim_in = {2048}, dim_out = {2048}}}
},
- ["nerv.SoftmaxCELayer"] =
+ ["nerv.SoftmaxCELayer"] = -- softmax + ce criterion layer for finetune output
{
ce_crit = {{}, {dim_in = {3001, 1}, dim_out = {1}, compressed = true}}
+ },
+ ["nerv.SoftmaxLayer"] = -- softmax for decode output
+ {
+ softmax = {{}, {dim_in = {3001}, dim_out = {3001}}}
}
}, param_repo, gconf)
-end
-function make_layer_repo(sublayer_repo, param_repo)
- return nerv.LayerRepo(
+ layer_repo:add_layers(
{
["nerv.DAGLayer"] =
{
global_transf = {{}, {
dim_in = {429}, dim_out = {429},
- sub_layers = sublayer_repo,
+ sub_layers = layer_repo,
connections = {
["<input>[1]"] = "blayer1[1]",
["blayer1[1]"] = "wlayer1[1]",
@@ -78,8 +80,8 @@ function make_layer_repo(sublayer_repo, param_repo)
}
}},
main = {{}, {
- dim_in = {429, 1}, dim_out = {1},
- sub_layers = sublayer_repo,
+ dim_in = {429}, dim_out = {3001},
+ sub_layers = layer_repo,
connections = {
["<input>[1]"] = "affine0[1]",
["affine0[1]"] = "sigmoid0[1]",
@@ -96,17 +98,51 @@ function make_layer_repo(sublayer_repo, param_repo)
["sigmoid5[1]"] = "affine6[1]",
["affine6[1]"] = "sigmoid6[1]",
["sigmoid6[1]"] = "affine7[1]",
- ["affine7[1]"] = "ce_crit[1]",
+ ["affine7[1]"] = "<output>[1]"
+ }
+ }}
+ }
+ }, param_repo, gconf)
+
+ layer_repo:add_layers(
+ {
+ ["nerv.DAGLayer"] =
+ {
+ ce_output = {{}, {
+ dim_in = {429, 1}, dim_out = {1},
+ sub_layers = layer_repo,
+ connections = {
+ ["<input>[1]"] = "main[1]",
+ ["main[1]"] = "ce_crit[1]",
["<input>[2]"] = "ce_crit[2]",
["ce_crit[1]"] = "<output>[1]"
}
+ }},
+ softmax_output = {{}, {
+ dim_in = {429}, dim_out = {3001},
+ sub_layers = layer_repo,
+ connections = {
+ ["<input>[1]"] = "main[1]",
+ ["main[1]"] = "softmax[1]",
+ ["softmax[1]"] = "<output>[1]"
+ }
}}
}
}, param_repo, gconf)
+
+ return layer_repo
end
function get_network(layer_repo)
- return layer_repo:get_layer("main")
+ return layer_repo:get_layer("ce_output")
+end
+
+function get_decode_network(layer_repo)
+ return layer_repo:get_layer("softmax_output")
+end
+
+function get_global_transf(layer_repo)
+ return layer_repo:get_layer("global_transf")
end
function make_readers(scp_file, layer_repo)
@@ -145,18 +181,18 @@ function get_input_order()
return {"main_scp", "phone_state"}
end
-function get_accuracy(sublayer_repo)
- local ce_crit = sublayer_repo:get_layer("ce_crit")
+function get_accuracy(layer_repo)
+ local ce_crit = layer_repo:get_layer("ce_crit")
return ce_crit.total_correct / ce_crit.total_frames * 100
end
-function print_stat(sublayer_repo)
- local ce_crit = sublayer_repo:get_layer("ce_crit")
+function print_stat(layer_repo)
+ local ce_crit = layer_repo:get_layer("ce_crit")
nerv.info("*** training stat begin ***")
nerv.printf("cross entropy:\t\t%.8f\n", ce_crit.total_ce)
nerv.printf("correct:\t\t%d\n", ce_crit.total_correct)
nerv.printf("frames:\t\t\t%d\n", ce_crit.total_frames)
nerv.printf("err/frm:\t\t%.8f\n", ce_crit.total_ce / ce_crit.total_frames)
- nerv.printf("accuracy:\t\t%.3f%%\n", get_accuracy(sublayer_repo))
+ nerv.printf("accuracy:\t\t%.3f%%\n", get_accuracy(layer_repo))
nerv.info("*** training stat end ***")
end
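
Splitting the old monolithic network into main, ce_output, and softmax_output lets training and decoding share one stack of affine/sigmoid layers. A hedged usage sketch (the three accessors are the ones defined in this diff; the import call follows the pattern in build_trainer above):

    -- sketch: obtaining the task-specific networks from a single repo
    local param_repo = nerv.ParamRepo()
    param_repo:import(gconf.initialized_param, nil, gconf)
    local layer_repo = make_layer_repo(param_repo)
    local train_net  = get_network(layer_repo)         -- ce_output DAG, for finetuning
    local decode_net = get_decode_network(layer_repo)  -- softmax_output DAG, for decoding
    local transf     = get_global_transf(layer_repo)   -- feature transform alone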
diff --git a/nerv/examples/swb_baseline_basic.lua b/nerv/examples/swb_baseline_basic.lua
new file mode 100644
index 0000000..c47ec3e
--- /dev/null
+++ b/nerv/examples/swb_baseline_basic.lua
@@ -0,0 +1,161 @@
+require 'htk_io'
+gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9,
+ cumat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
+ frm_ext = 5,
+ tr_scp = "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
+ cv_scp = "/slfs1/users/mfy43/swb_ivec/train_cv.scp",
+ htk_conf = "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
+ initialized_param = {"/slfs1/users/mfy43/swb_init.nerv",
+ "/slfs1/users/mfy43/swb_global_transf.nerv"},
+ debug = false}
+
+function make_layer_repo(param_repo)
+ local layer_repo = nerv.LayerRepo(
+ {
+ -- global transf
+ ["nerv.BiasLayer"] =
+ {
+ blayer1 = {{bias = "bias1"}, {dim_in = {429}, dim_out = {429}}},
+ blayer2 = {{bias = "bias2"}, {dim_in = {429}, dim_out = {429}}}
+ },
+ ["nerv.WindowLayer"] =
+ {
+ wlayer1 = {{window = "window1"}, {dim_in = {429}, dim_out = {429}}},
+ wlayer2 = {{window = "window2"}, {dim_in = {429}, dim_out = {429}}}
+ },
+ -- biased linearity
+ ["nerv.AffineLayer"] =
+ {
+ affine0 = {{ltp = "affine0_ltp", bp = "affine0_bp"},
+ {dim_in = {429}, dim_out = {2048}}},
+ affine1 = {{ltp = "affine1_ltp", bp = "affine1_bp"},
+ {dim_in = {2048}, dim_out = {2048}}},
+ affine2 = {{ltp = "affine2_ltp", bp = "affine2_bp"},
+ {dim_in = {2048}, dim_out = {2048}}},
+ affine3 = {{ltp = "affine3_ltp", bp = "affine3_bp"},
+ {dim_in = {2048}, dim_out = {2048}}},
+ affine4 = {{ltp = "affine4_ltp", bp = "affine4_bp"},
+ {dim_in = {2048}, dim_out = {2048}}},
+ affine5 = {{ltp = "affine5_ltp", bp = "affine5_bp"},
+ {dim_in = {2048}, dim_out = {2048}}},
+ affine6 = {{ltp = "affine6_ltp", bp = "affine6_bp"},
+ {dim_in = {2048}, dim_out = {2048}}},
+ affine7 = {{ltp = "affine7_ltp", bp = "affine7_bp"},
+ {dim_in = {2048}, dim_out = {3001}}}
+ },
+ ["nerv.SigmoidLayer"] =
+ {
+ sigmoid0 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+ sigmoid1 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+ sigmoid2 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+ sigmoid3 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+ sigmoid4 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+ sigmoid5 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+ sigmoid6 = {{}, {dim_in = {2048}, dim_out = {2048}}}
+ },
+ ["nerv.SoftmaxCELayer"] =
+ {
+ ce_crit = {{}, {dim_in = {3001, 1}, dim_out = {1}, compressed = true}}
+ }
+ }, param_repo, gconf)
+
+ layer_repo:add_layers(
+ {
+ ["nerv.DAGLayer"] =
+ {
+ global_transf = {{}, {
+ dim_in = {429}, dim_out = {429},
+ sub_layers = layer_repo,
+ connections = {
+ ["<input>[1]"] = "blayer1[1]",
+ ["blayer1[1]"] = "wlayer1[1]",
+ ["wlayer1[1]"] = "blayer2[1]",
+ ["blayer2[1]"] = "wlayer2[1]",
+ ["wlayer2[1]"] = "<output>[1]"
+ }
+ }},
+ main = {{}, {
+ dim_in = {429, 1}, dim_out = {1},
+ sub_layers = layer_repo,
+ connections = {
+ ["<input>[1]"] = "affine0[1]",
+ ["affine0[1]"] = "sigmoid0[1]",
+ ["sigmoid0[1]"] = "affine1[1]",
+ ["affine1[1]"] = "sigmoid1[1]",
+ ["sigmoid1[1]"] = "affine2[1]",
+ ["affine2[1]"] = "sigmoid2[1]",
+ ["sigmoid2[1]"] = "affine3[1]",
+ ["affine3[1]"] = "sigmoid3[1]",
+ ["sigmoid3[1]"] = "affine4[1]",
+ ["affine4[1]"] = "sigmoid4[1]",
+ ["sigmoid4[1]"] = "affine5[1]",
+ ["affine5[1]"] = "sigmoid5[1]",
+ ["sigmoid5[1]"] = "affine6[1]",
+ ["affine6[1]"] = "sigmoid6[1]",
+ ["sigmoid6[1]"] = "affine7[1]",
+ ["affine7[1]"] = "ce_crit[1]",
+ ["<input>[2]"] = "ce_crit[2]",
+ ["ce_crit[1]"] = "<output>[1]"
+ }
+ }}
+ }
+ }, param_repo, gconf)
+ return layer_repo
+end
+
+function get_network(layer_repo)
+ return layer_repo:get_layer("main")
+end
+
+function make_readers(scp_file, layer_repo)
+ return {
+ {reader = nerv.TNetReader(gconf,
+ {
+ id = "main_scp",
+ scp_file = scp_file,
+ conf_file = gconf.htk_conf,
+ frm_ext = gconf.frm_ext,
+ mlfs = {
+ phone_state = {
+ file = "/slfs1/users/mfy43/swb_ivec/ref.mlf",
+ format = "map",
+ format_arg = "/slfs1/users/mfy43/swb_ivec/dict",
+ dir = "*/",
+ ext = "lab"
+ }
+ },
+ global_transf = layer_repo:get_layer("global_transf")
+ }),
+ data = {main_scp = 429, phone_state = 1}}
+ }
+end
+
+function make_buffer(readers)
+ return nerv.SGDBuffer(gconf,
+ {
+ buffer_size = gconf.buffer_size,
+ randomize = gconf.randomize,
+ readers = readers
+ })
+end
+
+function get_input_order()
+ return {"main_scp", "phone_state"}
+end
+
+function get_accuracy(layer_repo)
+ local ce_crit = layer_repo:get_layer("ce_crit")
+ return ce_crit.total_correct / ce_crit.total_frames * 100
+end
+
+function print_stat(layer_repo)
+ local ce_crit = layer_repo:get_layer("ce_crit")
+ nerv.info("*** training stat begin ***")
+ nerv.printf("cross entropy:\t\t%.8f\n", ce_crit.total_ce)
+ nerv.printf("correct:\t\t%d\n", ce_crit.total_correct)
+ nerv.printf("frames:\t\t\t%d\n", ce_crit.total_frames)
+ nerv.printf("err/frm:\t\t%.8f\n", ce_crit.total_ce / ce_crit.total_frames)
+ nerv.printf("accuracy:\t\t%.3f%%\n", get_accuracy(layer_repo))
+ nerv.info("*** training stat end ***")
+end
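
Unlike the revised swb_baseline.lua, this basic variant keeps ce_crit wired inside the main DAG, so it only covers training. A sketch of driving it through build_trainer from asr_trainer.lua (the dofile loading step is illustrative; the actual trainer script may select the config differently):

    -- hypothetical: one cross-validation pass with this config
    dofile("nerv/examples/swb_baseline_basic.lua")   -- defines gconf and the hooks above
    local trainer = build_trainer(gconf.initialized_param)
    -- prefix = nil (no parameter write-back), bp = false (no backprop)
    local cv_accuracy = trainer(nil, gconf.cv_scp, false)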
diff --git a/nerv/nn/layer_dag.lua b/nerv/nn/layer_dag.lua
index e5c1ac7..e9d4d86 100644
--- a/nerv/nn/layer_dag.lua
+++ b/nerv/nn/layer_dag.lua
@@ -112,7 +112,7 @@ function DAGLayer:__init(id, global_conf, layer_conf)
end
end
for i = 1, #queue do
- nerv.info("enqueued layer: %s", queue[i].layer.id)
+ nerv.info("enqueued layer: %s %s", queue[i].layer, queue[i].layer.id)
end
for id, ref in pairs(layers) do
@@ -125,6 +125,7 @@ function DAGLayer:__init(id, global_conf, layer_conf)
self.layers = layers
self.inputs = inputs
self.outputs = outputs
+ self.id = id
self.dim_in = dim_in
self.dim_out = dim_out
self.parsed_conn = parsed_conn
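
Recording self.id means a nested DAGLayer now reports a usable identifier, e.g. in the enqueued-layer trace above. A small sketch (layer_conf is a placeholder for a full DAG spec):

    -- before this change, dag.id was nil on DAGLayer instances;
    -- the constructor signature matches DAGLayer:__init(id, global_conf, layer_conf)
    local dag = nerv.DAGLayer("global_transf", gconf, layer_conf)
    print(dag.id)  --> "global_transf"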
diff --git a/nerv/nn/layer_repo.lua b/nerv/nn/layer_repo.lua
index 602c37c..ef333a7 100644
--- a/nerv/nn/layer_repo.lua
+++ b/nerv/nn/layer_repo.lua
@@ -1,7 +1,12 @@
local LayerRepo = nerv.class("nerv.LayerRepo")
function LayerRepo:__init(layer_spec, param_repo, global_conf)
- local layers = {}
+ self.layers = {}
+ self:add_layers(layer_spec, param_repo, global_conf)
+end
+
+function LayerRepo:add_layers(layer_spec, param_repo, global_conf)
+ local layers = self.layers
for ltype, llist in pairs(layer_spec) do
local layer_type = nerv.get_type(ltype)
for id, spec in pairs(llist) do
@@ -22,7 +27,6 @@ function LayerRepo:__init(layer_spec, param_repo, global_conf)
layers[id] = layer_type(id, global_conf, layer_config)
end
end
- self.layers = layers
end
function LayerRepo:get_layer(lid)
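
Factoring construction into add_layers is what enables the two-pass configs above: a later spec can name layers created by an earlier call, because both passes write into the same self.layers table. A minimal sketch (primitive_spec and dag_spec are placeholders):

    -- pass 1: __init delegates to add_layers for the primitive layers
    local repo = nerv.LayerRepo(primitive_spec, param_repo, gconf)
    -- pass 2: DAG specs may now use sub_layers = repo to look up pass-1 layers
    repo:add_layers(dag_spec, param_repo, gconf)
    local net = repo:get_layer("main")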