Diffstat (limited to 'nerv/examples/swb_baseline2.lua')
-rw-r--r--  nerv/examples/swb_baseline2.lua  87
1 file changed, 44 insertions(+), 43 deletions(-)
diff --git a/nerv/examples/swb_baseline2.lua b/nerv/examples/swb_baseline2.lua
index 8b5ebb1..38cfb9a 100644
--- a/nerv/examples/swb_baseline2.lua
+++ b/nerv/examples/swb_baseline2.lua
@@ -1,13 +1,13 @@
require 'htk_io'
-gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9,
+gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9, frm_ext = 5,
rearrange = true, -- just to make the context order consistent with old results, deprecated
- frm_ext = 5,
frm_trim = 5, -- trim the first and last 5 frames, TNet just does this, deprecated
tr_scp = "/speechlab/users/mfy43/swb50/train_bp.scp",
cv_scp = "/speechlab/users/mfy43/swb50/train_cv.scp",
htk_conf = "/speechlab/users/mfy43/swb50/plp_0_d_a.conf",
initialized_param = {"/speechlab/users/mfy43/swb50/swb_init.nerv",
- "/speechlab/users/mfy43/swb50/swb_global_transf.nerv"}}
+ "/speechlab/users/mfy43/swb50/swb_global_transf.nerv"},
+ chunk_size = 1}
function make_layer_repo(param_repo)
local layer_repo = nerv.LayerRepo(
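Note on the hunk above: frm_ext is only moved onto the first line; the substantive change is the new chunk_size = 1 entry, which the buffer constructed at the bottom of this file reads. A minimal sketch of the resulting table, with the scp/conf paths elided as in the diff (reading chunk_size as "time steps per training chunk", with 1 meaning no recurrent unrolling, is an assumption, not something the patch states):

    -- sketch of the post-patch gconf; chunk_size = 1 is assumed to
    -- mean one time step per chunk (feed-forward, no unrolling)
    gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9, frm_ext = 5,
             rearrange = true, -- deprecated, kept for old results
             frm_trim = 5,     -- deprecated, mirrors TNet's trimming
             -- tr_scp/cv_scp/htk_conf/initialized_param as above
             chunk_size = 1}   -- new: consumed by the buffer below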
@@ -15,13 +15,13 @@ function make_layer_repo(param_repo)
-- global transf
["nerv.BiasLayer"] =
{
- blayer1 = {dim_in = {429}, dim_out = {429}, params = {bias = "bias1"}},
- blayer2 = {dim_in = {429}, dim_out = {429}, params = {bias = "bias2"}}
+ blayer1 = {dim_in = {429}, dim_out = {429}, params = {bias = "bias0"}},
+ blayer2 = {dim_in = {429}, dim_out = {429}, params = {bias = "bias1"}}
},
["nerv.WindowLayer"] =
{
- wlayer1 = {dim_in = {429}, dim_out = {429}, params = {window = "window1"}},
- wlayer2 = {dim_in = {429}, dim_out = {429}, params = {window = "window2"}}
+ wlayer1 = {dim_in = {429}, dim_out = {429}, params = {window = "window0"}},
+ wlayer2 = {dim_in = {429}, dim_out = {429}, params = {window = "window1"}}
},
-- biased linearity
["nerv.AffineLayer"] =
@@ -65,39 +65,39 @@ function make_layer_repo(param_repo)
layer_repo:add_layers(
{
- ["nerv.DAGLayer"] =
+ ["nerv.GraphLayer"] =
{
global_transf = {
dim_in = {429}, dim_out = {429},
- sub_layers = layer_repo,
+ layer_repo = layer_repo,
connections = {
- ["<input>[1]"] = "blayer1[1]",
- ["blayer1[1]"] = "wlayer1[1]",
- ["wlayer1[1]"] = "blayer2[1]",
- ["blayer2[1]"] = "wlayer2[1]",
- ["wlayer2[1]"] = "<output>[1]"
+ {"<input>[1]", "blayer1[1]", 0},
+ {"blayer1[1]", "wlayer1[1]", 0},
+ {"wlayer1[1]", "blayer2[1]", 0},
+ {"blayer2[1]", "wlayer2[1]", 0},
+ {"wlayer2[1]", "<output>[1]", 0}
}
},
main = {
dim_in = {429}, dim_out = {3001},
- sub_layers = layer_repo,
+ layer_repo = layer_repo,
connections = {
- ["<input>[1]"] = "affine0[1]",
- ["affine0[1]"] = "sigmoid0[1]",
- ["sigmoid0[1]"] = "affine1[1]",
- ["affine1[1]"] = "sigmoid1[1]",
- ["sigmoid1[1]"] = "affine2[1]",
- ["affine2[1]"] = "sigmoid2[1]",
- ["sigmoid2[1]"] = "affine3[1]",
- ["affine3[1]"] = "sigmoid3[1]",
- ["sigmoid3[1]"] = "affine4[1]",
- ["affine4[1]"] = "sigmoid4[1]",
- ["sigmoid4[1]"] = "affine5[1]",
- ["affine5[1]"] = "sigmoid5[1]",
- ["sigmoid5[1]"] = "affine6[1]",
- ["affine6[1]"] = "sigmoid6[1]",
- ["sigmoid6[1]"] = "affine7[1]",
- ["affine7[1]"] = "<output>[1]"
+ {"<input>[1]", "affine0[1]", 0},
+ {"affine0[1]", "sigmoid0[1]", 0},
+ {"sigmoid0[1]", "affine1[1]", 0},
+ {"affine1[1]", "sigmoid1[1]", 0},
+ {"sigmoid1[1]", "affine2[1]", 0},
+ {"affine2[1]", "sigmoid2[1]", 0},
+ {"sigmoid2[1]", "affine3[1]", 0},
+ {"affine3[1]", "sigmoid3[1]", 0},
+ {"sigmoid3[1]", "affine4[1]", 0},
+ {"affine4[1]", "sigmoid4[1]", 0},
+ {"sigmoid4[1]", "affine5[1]", 0},
+ {"affine5[1]", "sigmoid5[1]", 0},
+ {"sigmoid5[1]", "affine6[1]", 0},
+ {"affine6[1]", "sigmoid6[1]", 0},
+ {"sigmoid6[1]", "affine7[1]", 0},
+ {"affine7[1]", "<output>[1]", 0}
}
}
}
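The hunk above and the one below carry the main migration: nerv.DAGLayer becomes nerv.GraphLayer, sub_layers becomes layer_repo, and every connection turns from a ["from"] = "to" map entry into a {from, to, shift} triple. The third element is presumably a time shift for recurrent links (all 0 here, i.e. connections within the same time step); that reading is an assumption, not stated in the patch. A minimal sketch of the translation on a two-layer graph:

    -- old DAGLayer style: connections as a map from source to sink
    connections = {
        ["<input>[1]"] = "blayer1[1]",
        ["blayer1[1]"] = "<output>[1]"
    }

    -- new GraphLayer style: an array of {from, to, time_shift}
    -- triples; 0 is assumed to mean "same time step"
    connections = {
        {"<input>[1]", "blayer1[1]", 0},
        {"blayer1[1]", "<output>[1]", 0}
    }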
@@ -105,25 +105,25 @@ function make_layer_repo(param_repo)
layer_repo:add_layers(
{
- ["nerv.DAGLayer"] =
+ ["nerv.GraphLayer"] =
{
ce_output = {
dim_in = {429, 1}, dim_out = {1},
- sub_layers = layer_repo,
+ layer_repo = layer_repo,
connections = {
- ["<input>[1]"] = "main[1]",
- ["main[1]"] = "ce_crit[1]",
- ["<input>[2]"] = "ce_crit[2]",
- ["ce_crit[1]"] = "<output>[1]"
+ {"<input>[1]", "main[1]", 0},
+ {"main[1]", "ce_crit[1]", 0},
+ {"<input>[2]", "ce_crit[2]", 0},
+ {"ce_crit[1]", "<output>[1]", 0}
}
},
softmax_output = {
dim_in = {429}, dim_out = {3001},
- sub_layers = layer_repo,
+ layer_repo = layer_repo,
connections = {
- ["<input>[1]"] = "main[1]",
- ["main[1]"] = "softmax[1]",
- ["softmax[1]"] = "<output>[1]"
+ {"<input>[1]", "main[1]", 0},
+ {"main[1]", "softmax[1]", 0},
+ {"softmax[1]", "<output>[1]", 0}
}
}
}
@@ -146,7 +146,7 @@ end
function make_readers(scp_file, layer_repo)
return {
- {reader = nerv.TNetReader(gconf,
+ {reader = nerv.HTKReader(gconf,
{
id = "main_scp",
scp_file = scp_file,
@@ -167,10 +167,11 @@ function make_readers(scp_file, layer_repo)
end
function make_buffer(readers)
- return nerv.SGDBuffer(gconf,
+ return nerv.FrmBuffer(gconf,
{
buffer_size = gconf.buffer_size,
batch_size = gconf.batch_size,
+ chunk_size = gconf.chunk_size,
randomize = gconf.randomize,
readers = readers,
use_gpu = true
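The last hunk swaps nerv.SGDBuffer for nerv.FrmBuffer and threads the new gconf.chunk_size through to it. The page cuts off after use_gpu = true, so the closing tokens in this sketch are assumed:

    -- post-patch make_buffer, reconstructed from the hunk above;
    -- the final ") end" is assumed, since the view truncates here
    function make_buffer(readers)
        return nerv.FrmBuffer(gconf,
            {
                buffer_size = gconf.buffer_size,
                batch_size  = gconf.batch_size,
                chunk_size  = gconf.chunk_size, -- new field from gconf
                randomize   = gconf.randomize,
                readers     = readers,
                use_gpu     = true
            })
    end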