summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  nerv/examples/swb_baseline2.lua   | 71
-rw-r--r--  nerv/examples/timit_baseline2.lua | 16
2 files changed, 43 insertions, 44 deletions
diff --git a/nerv/examples/swb_baseline2.lua b/nerv/examples/swb_baseline2.lua
index 8b5ebb1..c4abc01 100644
--- a/nerv/examples/swb_baseline2.lua
+++ b/nerv/examples/swb_baseline2.lua
@@ -1,7 +1,6 @@
require 'htk_io'
-gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9,
+gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9, frm_ext = 5,
rearrange = true, -- just to make the context order consistent with old results, deprecated
- frm_ext = 5,
frm_trim = 5, -- trim the first and last 5 frames, TNet just does this, deprecated
tr_scp = "/speechlab/users/mfy43/swb50/train_bp.scp",
cv_scp = "/speechlab/users/mfy43/swb50/train_cv.scp",
@@ -65,39 +64,39 @@ function make_layer_repo(param_repo)
layer_repo:add_layers(
{
- ["nerv.DAGLayer"] =
+ ["nerv.GraphLayer"] =
{
global_transf = {
dim_in = {429}, dim_out = {429},
- sub_layers = layer_repo,
+ layer_repo = layer_repo,
connections = {
- ["<input>[1]"] = "blayer1[1]",
- ["blayer1[1]"] = "wlayer1[1]",
- ["wlayer1[1]"] = "blayer2[1]",
- ["blayer2[1]"] = "wlayer2[1]",
- ["wlayer2[1]"] = "<output>[1]"
+ {"<input>[1]", "blayer1[1]", 0},
+ {"blayer1[1]", "wlayer1[1]", 0},
+ {"wlayer1[1]", "blayer2[1]", 0},
+ {"blayer2[1]", "wlayer2[1]", 0},
+ {"wlayer2[1]", "<output>[1]", 0}
}
},
main = {
dim_in = {429}, dim_out = {3001},
- sub_layers = layer_repo,
+ layer_repo = layer_repo,
connections = {
- ["<input>[1]"] = "affine0[1]",
- ["affine0[1]"] = "sigmoid0[1]",
- ["sigmoid0[1]"] = "affine1[1]",
- ["affine1[1]"] = "sigmoid1[1]",
- ["sigmoid1[1]"] = "affine2[1]",
- ["affine2[1]"] = "sigmoid2[1]",
- ["sigmoid2[1]"] = "affine3[1]",
- ["affine3[1]"] = "sigmoid3[1]",
- ["sigmoid3[1]"] = "affine4[1]",
- ["affine4[1]"] = "sigmoid4[1]",
- ["sigmoid4[1]"] = "affine5[1]",
- ["affine5[1]"] = "sigmoid5[1]",
- ["sigmoid5[1]"] = "affine6[1]",
- ["affine6[1]"] = "sigmoid6[1]",
- ["sigmoid6[1]"] = "affine7[1]",
- ["affine7[1]"] = "<output>[1]"
+ {"<input>[1]", "affine0[1]", 0},
+ {"affine0[1]", "sigmoid0[1]", 0},
+ {"sigmoid0[1]", "affine1[1]", 0},
+ {"affine1[1]", "sigmoid1[1]", 0},
+ {"sigmoid1[1]", "affine2[1]", 0},
+ {"affine2[1]", "sigmoid2[1]", 0},
+ {"sigmoid2[1]", "affine3[1]", 0},
+ {"affine3[1]", "sigmoid3[1]", 0},
+ {"sigmoid3[1]", "affine4[1]", 0},
+ {"affine4[1]", "sigmoid4[1]", 0},
+ {"sigmoid4[1]", "affine5[1]", 0},
+ {"affine5[1]", "sigmoid5[1]", 0},
+ {"sigmoid5[1]", "affine6[1]", 0},
+ {"affine6[1]", "sigmoid6[1]", 0},
+ {"sigmoid6[1]", "affine7[1]", 0},
+ {"affine7[1]", "<output>[1]", 0}
}
}
}
@@ -105,25 +104,25 @@ function make_layer_repo(param_repo)
layer_repo:add_layers(
{
- ["nerv.DAGLayer"] =
+ ["nerv.GraphLayer"] =
{
ce_output = {
dim_in = {429, 1}, dim_out = {1},
- sub_layers = layer_repo,
+ layer_repo = layer_repo,
connections = {
- ["<input>[1]"] = "main[1]",
- ["main[1]"] = "ce_crit[1]",
- ["<input>[2]"] = "ce_crit[2]",
- ["ce_crit[1]"] = "<output>[1]"
+ {"<input>[1]", "main[1]", 0},
+ {"main[1]", "ce_crit[1]", 0},
+ {"<input>[2]", "ce_crit[2]", 0},
+ {"ce_crit[1]", "<output>[1]", 0}
}
},
softmax_output = {
dim_in = {429}, dim_out = {3001},
- sub_layers = layer_repo,
+ layer_repo = layer_repo,
connections = {
- ["<input>[1]"] = "main[1]",
- ["main[1]"] = "softmax[1]",
- ["softmax[1]"] = "<output>[1]"
+ {"<input>[1]", "main[1]", 0},
+ {"main[1]", "softmax[1]", 0},
+ {"softmax[1]", "<output>[1]", 0}
}
}
}
diff --git a/nerv/examples/timit_baseline2.lua b/nerv/examples/timit_baseline2.lua
index d783c3d..b1c1e66 100644
--- a/nerv/examples/timit_baseline2.lua
+++ b/nerv/examples/timit_baseline2.lua
@@ -1,14 +1,14 @@
require 'kaldi_io'
gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9, frm_ext = 5,
tr_scp = "ark:/speechlab/tools/KALDI/kaldi-master/src/featbin/copy-feats " ..
- "scp:/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_prepare/train.scp ark:- |",
+ "scp:/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/train.scp ark:- |",
cv_scp = "ark:/speechlab/tools/KALDI/kaldi-master/src/featbin/copy-feats " ..
- "scp:/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_prepare/cv.scp ark:- |",
- initialized_param = {"/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_prepare/nnet_init.nerv",
- "/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_prepare/nnet_output.nerv",
- "/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_prepare/nnet_trans.nerv"},
- decode_param = {"/speechlab/users/mfy43/timit/nnet_init_20160229015745_iter_13_lr0.013437_tr72.434_cv58.729.nerv",
- "/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_prepare/nnet_trans.nerv"}}
+ "scp:/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/cv.scp ark:- |",
+ initialized_param = {"/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/nnet_init.nerv",
+ "/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/nnet_output.nerv",
+ "/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/nnet_trans.nerv"},
+ -- params in nnet_trans.nerv are included in the trained model
+ decode_param = {"/speechlab/users/mfy43/timit/s5/nerv_20160311205342/nnet_init_20160311211609_iter_13_lr0.013437_tr72.572_cv58.709.nerv"}}
function make_layer_repo(param_repo)
local layer_repo = nerv.LayerRepo(
@@ -113,7 +113,7 @@ function make_layer_repo(param_repo)
dim_in = {440}, dim_out = {1959},
layer_repo = layer_repo,
connections = {
- {"<input>[1]", "main[1]", 0},
+ {"<input>[1]", "main[1]", 0},
{"main[1]", "softmax[1]", 0},
{"softmax[1]", "<output>[1]", 0}
}