about summary refs log tree commit diff
path: root/nerv/examples
diff options
context:
space:
mode:
authorDeterminant <ted.sybil@gmail.com>2016-03-16 17:53:39 +0800
committerDeterminant <ted.sybil@gmail.com>2016-03-16 17:53:39 +0800
commit289ac7f4b6e88b935da5c891e1efcf91fc047403 (patch)
treed4fc3a4fc20f2d5908624b3f6587ecd57966d719 /nerv/examples
parent07fc1e2794027d44c255e1062c4491346b101a08 (diff)
merge seq_buffer and change asr_trainer.lua accordingly
Diffstat (limited to 'nerv/examples')
-rw-r--r-- nerv/examples/asr_trainer.lua   | 71
-rw-r--r-- nerv/examples/swb_baseline.lua  | 1
-rw-r--r-- nerv/examples/swb_baseline2.lua | 8
-rw-r--r-- nerv/examples/timit_baseline2.lua | 6
4 files changed, 52 insertions, 34 deletions
diff --git a/nerv/examples/asr_trainer.lua b/nerv/examples/asr_trainer.lua
index 6bdf57c..645f1ef 100644
--- a/nerv/examples/asr_trainer.lua
+++ b/nerv/examples/asr_trainer.lua
@@ -22,9 +22,9 @@ local function build_trainer(ifname)
local input_order = get_input_order()
network = nerv.Network("nt", gconf, {network = network})
- network:init(gconf.batch_size, 1)
+ network:init(gconf.batch_size, gconf.chunk_size)
global_transf = nerv.Network("gt", gconf, {network = global_transf})
- global_transf:init(gconf.batch_size, 1)
+ global_transf:init(gconf.batch_size, gconf.chunk_size)
local iterative_trainer = function (prefix, scp_file, bp, rebind_param_repo)
-- rebind the params if necessary
@@ -39,11 +39,17 @@ local function build_trainer(ifname)
local buffer = make_buffer(make_readers(scp_file, layer_repo))
-- initialize the network
gconf.cnt = 0
- err_input = {mat_type(gconf.batch_size, 1)}
- err_input[1]:fill(1)
+ local err_input = {{}}
+ local output = {{}}
+ for i = 1, gconf.chunk_size do
+ local mini_batch = mat_type(gconf.batch_size, 1)
+ mini_batch:fill(1)
+ table.insert(err_input[1], mini_batch)
+ table.insert(output[1], mat_type(gconf.batch_size, 1))
+ end
network:epoch_init()
global_transf:epoch_init()
- for data in buffer.get_data, buffer do
+ for d in buffer.get_data, buffer do
-- prine stat periodically
gconf.cnt = gconf.cnt + 1
if gconf.cnt == 1000 then
@@ -54,35 +60,39 @@ local function build_trainer(ifname)
-- break
end
local input = {}
+ local err_output = {}
-- if gconf.cnt == 1000 then break end
for i, e in ipairs(input_order) do
local id = e.id
- if data[id] == nil then
+ if d.data[id] == nil then
nerv.error("input data %s not found", id)
end
- local transformed
+ local transformed = {}
+ local err_output_i = {}
if e.global_transf then
- transformed = nerv.speech_utils.global_transf(data[id],
- global_transf,
- gconf.frm_ext or 0, 0,
- gconf)
+ for _, mini_batch in ipairs(d.data[id]) do
+ table.insert(transformed,
+ nerv.speech_utils.global_transf(mini_batch,
+ global_transf,
+ gconf.frm_ext or 0, 0,
+ gconf))
+ end
else
- transformed = data[id]
+ transformed = d.data[id]
+ end
+ for _, mini_batch in ipairs(transformed) do
+ table.insert(err_output_i, mini_batch:create())
end
+ table.insert(err_output, err_output_i)
table.insert(input, transformed)
end
- local output = {mat_type(gconf.batch_size, 1)}
- err_output = {}
- for i = 1, #input do
- table.insert(err_output, input[i]:create())
- end
- network:mini_batch_init({seq_length = table.vector(gconf.batch_size, 1),
- new_seq = {},
+ network:mini_batch_init({seq_length = d.seq_length,
+ new_seq = d.new_seq,
do_train = bp,
- input = {input},
- output = {output},
- err_input = {err_input},
- err_output = {err_output}})
+ input = input,
+ output = output,
+ err_input = err_input,
+ err_output = err_output})
network:propagate()
if bp then
network:back_propagate()
@@ -111,19 +121,21 @@ end
local function check_and_add_defaults(spec, opts)
local function get_opt_val(k)
- return opts[string.gsub(k, '_', '-')].val
+ local k = string.gsub(k, '_', '-')
+ return opts[k].val, opts[k].specified
end
local opt_v = get_opt_val("resume_from")
if opt_v then
+ nerv.info("resuming from previous training state")
gconf = dofile(opt_v)
else
for k, v in pairs(spec) do
- local opt_v = get_opt_val(k)
- if opt_v ~= nil then
+ local opt_v, specified = get_opt_val(k)
+ if (not specified) and gconf[k] ~= nil then
+ nerv.info("using setting in network config file: %s = %s", k, gconf[k])
+ elseif opt_v ~= nil then
+ nerv.info("using setting in options: %s = %s", k, opt_v)
gconf[k] = opt_v
- elseif gconf[k] ~= nil then
- elseif v ~= nil then
- gconf[k] = v
end
end
end
@@ -168,6 +180,7 @@ end
local trainer_defaults = {
lrate = 0.8,
batch_size = 256,
+ chunk_size = 1,
buffer_size = 81920,
wcost = 1e-6,
momentum = 0.9,
diff --git a/nerv/examples/swb_baseline.lua b/nerv/examples/swb_baseline.lua
index 0ce8468..ece4d44 100644
--- a/nerv/examples/swb_baseline.lua
+++ b/nerv/examples/swb_baseline.lua
@@ -171,6 +171,7 @@ function make_buffer(readers)
{
buffer_size = gconf.buffer_size,
batch_size = gconf.batch_size,
+ chunk_size = gconf.chunk_size,
randomize = gconf.randomize,
readers = readers,
use_gpu = true
diff --git a/nerv/examples/swb_baseline2.lua b/nerv/examples/swb_baseline2.lua
index 6796f6f..38cfb9a 100644
--- a/nerv/examples/swb_baseline2.lua
+++ b/nerv/examples/swb_baseline2.lua
@@ -6,7 +6,8 @@ gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9, frm_ext = 5,
cv_scp = "/speechlab/users/mfy43/swb50/train_cv.scp",
htk_conf = "/speechlab/users/mfy43/swb50/plp_0_d_a.conf",
initialized_param = {"/speechlab/users/mfy43/swb50/swb_init.nerv",
- "/speechlab/users/mfy43/swb50/swb_global_transf.nerv"}}
+ "/speechlab/users/mfy43/swb50/swb_global_transf.nerv"},
+ chunk_size = 1}
function make_layer_repo(param_repo)
local layer_repo = nerv.LayerRepo(
@@ -145,7 +146,7 @@ end
function make_readers(scp_file, layer_repo)
return {
- {reader = nerv.TNetReader(gconf,
+ {reader = nerv.HTKReader(gconf,
{
id = "main_scp",
scp_file = scp_file,
@@ -166,10 +167,11 @@ function make_readers(scp_file, layer_repo)
end
function make_buffer(readers)
- return nerv.SGDBuffer(gconf,
+ return nerv.FrmBuffer(gconf,
{
buffer_size = gconf.buffer_size,
batch_size = gconf.batch_size,
+ chunk_size = gconf.chunk_size,
randomize = gconf.randomize,
readers = readers,
use_gpu = true
diff --git a/nerv/examples/timit_baseline2.lua b/nerv/examples/timit_baseline2.lua
index b1c1e66..658aa2e 100644
--- a/nerv/examples/timit_baseline2.lua
+++ b/nerv/examples/timit_baseline2.lua
@@ -8,7 +8,8 @@ gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9, frm_ext = 5,
"/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/nnet_output.nerv",
"/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/nnet_trans.nerv"},
-- params in nnet_trans.nerv are included in the trained model
- decode_param = {"/speechlab/users/mfy43/timit/s5/nerv_20160311205342/nnet_init_20160311211609_iter_13_lr0.013437_tr72.572_cv58.709.nerv"}}
+ decode_param = {"/speechlab/users/mfy43/timit/s5/nerv_20160311205342/nnet_init_20160311211609_iter_13_lr0.013437_tr72.572_cv58.709.nerv"},
+ chunk_size = 1}
function make_layer_repo(param_repo)
local layer_repo = nerv.LayerRepo(
@@ -176,10 +177,11 @@ function make_decode_readers(scp_file, layer_repo)
end
function make_buffer(readers)
- return nerv.SGDBuffer(gconf,
+ return nerv.FrmBuffer(gconf,
{
buffer_size = gconf.buffer_size,
batch_size = gconf.batch_size,
+ chunk_size = gconf.chunk_size,
randomize = gconf.randomize,
readers = readers,
use_gpu = true