Diffstat (limited to 'nerv/examples/timit_baseline2.lua')
-rw-r--r--  nerv/examples/timit_baseline2.lua  6
1 file changed, 4 insertions, 2 deletions
diff --git a/nerv/examples/timit_baseline2.lua b/nerv/examples/timit_baseline2.lua
index b1c1e66..658aa2e 100644
--- a/nerv/examples/timit_baseline2.lua
+++ b/nerv/examples/timit_baseline2.lua
@@ -8,7 +8,8 @@ gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9, frm_ext = 5,
"/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/nnet_output.nerv",
"/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/nnet_trans.nerv"},
-- params in nnet_trans.nerv are included in the trained model
- decode_param = {"/speechlab/users/mfy43/timit/s5/nerv_20160311205342/nnet_init_20160311211609_iter_13_lr0.013437_tr72.572_cv58.709.nerv"}}
+ decode_param = {"/speechlab/users/mfy43/timit/s5/nerv_20160311205342/nnet_init_20160311211609_iter_13_lr0.013437_tr72.572_cv58.709.nerv"},
+ chunk_size = 1}
function make_layer_repo(param_repo)
local layer_repo = nerv.LayerRepo(
@@ -176,10 +177,11 @@ function make_decode_readers(scp_file, layer_repo)
end
function make_buffer(readers)
- return nerv.SGDBuffer(gconf,
+ return nerv.FrmBuffer(gconf,
{
buffer_size = gconf.buffer_size,
batch_size = gconf.batch_size,
+ chunk_size = gconf.chunk_size,
randomize = gconf.randomize,
readers = readers,
use_gpu = true
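For reference, the net effect of the two hunks is that gconf gains a chunk_size field and make_buffer switches from nerv.SGDBuffer to nerv.FrmBuffer, passing the new field through. The sketch below assembles the patched pieces as they would plausibly read after this commit; the option names are taken from the hunks themselves, but the closing braces and the final end are reconstructed because the second hunk is truncated above, and indentation is approximate rather than verbatim.

-- Sketch of the patched parts of timit_baseline2.lua (not verbatim; requires
-- the NERV runtime). Only the lines touched by the diff and their immediate
-- context are shown.
gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9, frm_ext = 5,
         -- ... other configuration fields elided, as in the hunk above ...
         decode_param = {"/speechlab/users/mfy43/timit/s5/nerv_20160311205342/nnet_init_20160311211609_iter_13_lr0.013437_tr72.572_cv58.709.nerv"},
         -- chunk_size = 1 comes straight from the patch; presumably a chunk of
         -- one frame keeps the feed-forward DNN training behaviour unchanged
         -- while using the chunk-aware buffer below.
         chunk_size = 1}

function make_buffer(readers)
    -- FrmBuffer replaces SGDBuffer and additionally receives chunk_size;
    -- every option name here appears verbatim in the second hunk.
    return nerv.FrmBuffer(gconf,
                          {
                              buffer_size = gconf.buffer_size,
                              batch_size = gconf.batch_size,
                              chunk_size = gconf.chunk_size,
                              randomize = gconf.randomize,
                              readers = readers,
                              use_gpu = true
                          }) -- closing braces and "end" reconstructed (hunk truncated)
end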