summaryrefslogtreecommitdiff
path: root/htk_io/examples
diff options
context:
space:
mode:
author: Determinant <[email protected]>  2015-06-25 12:56:45 +0800
committer: Determinant <[email protected]>  2015-06-25 12:56:45 +0800
commit: a74183ddb4ab8383bfe214b3745eb8a0a99ee47a (patch)
tree: d5e69cf8c4c2db2e3a4722778352fc3c95953bb2 /htk_io/examples
parent: b6301089cde20f4c825c7f5deaf179082aad63da (diff)
let HTK I/O implementation be a single package
Diffstat (limited to 'htk_io/examples')
-rw-r--r--  htk_io/examples/tnet_io_example.lua              16
-rw-r--r--  htk_io/examples/tnet_preprocessing_example.lua   75
-rw-r--r--  htk_io/examples/tnet_preprocessing_example2.lua  68
-rw-r--r--  htk_io/examples/tnet_sgd_buffer.lua              70
4 files changed, 229 insertions, 0 deletions
diff --git a/htk_io/examples/tnet_io_example.lua b/htk_io/examples/tnet_io_example.lua
new file mode 100644
index 0000000..eea73a5
--- /dev/null
+++ b/htk_io/examples/tnet_io_example.lua
@@ -0,0 +1,16 @@
+require 'libspeech' -- speech I/O bindings (provides nerv.TNetFeatureRepo / nerv.TNetLabelRepo)
+frm_ext = 5 -- context frames padded on each side of an utterance (NOTE: global, not local)
+feat_repo = nerv.TNetFeatureRepo( -- feature source: utterances listed in an HTK-style .scp file
+    "/slfs1/users/mfy43/swb_ivec/train_bp.scp", -- script file: list of feature files
+    "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf", -- TNet/HTK feature-extraction config
+    frm_ext)
+lab_repo = nerv.TNetLabelRepo( -- label source: HTK master label file
+    "/slfs1/users/mfy43/swb_ivec/ref.mlf", -- reference labels
+    "map", -- label format (presumably dictionary-based mapping -- confirm against TNetLabelRepo docs)
+    "/slfs1/users/mfy43/swb_ivec/dict", -- format argument: mapping dictionary
+    "*/", -- directory pattern used for label lookup
+    "lab") -- label file extension
+feat_utter = feat_repo:cur_utter() -- feature matrix of the current utterance
+print(feat_utter)
+lab_utter = lab_repo:get_utter(feat_repo, feat_utter:nrow() - frm_ext * 2) -- frame count excludes the 2*frm_ext context padding rows
+print(lab_utter)
diff --git a/htk_io/examples/tnet_preprocessing_example.lua b/htk_io/examples/tnet_preprocessing_example.lua
new file mode 100644
index 0000000..9e1c0ce
--- /dev/null
+++ b/htk_io/examples/tnet_preprocessing_example.lua
@@ -0,0 +1,75 @@
+require 'libspeech' -- speech I/O bindings (TNet feature/label repos, matrix helpers)
+frm_ext = 5 -- context frames on each side of a target frame (window width = 2*5+1 = 11)
+gconf = {cumat_type = nerv.CuMatrixFloat, -- global config: CUDA float matrices
+ batch_size = 158}
+param_repo = nerv.ParamRepo({"global_transf.nerv"}) -- pretrained bias/window parameters
+sublayer_repo = nerv.LayerRepo( -- leaf layers making up the global feature transform
+ {
+ ["nerv.BiasLayer"] =
+ {
+ blayer1 = {{bias = "bias1"}, {dim_in = {429}, dim_out = {429}}},
+ blayer2 = {{bias = "bias2"}, {dim_in = {429}, dim_out = {429}}}
+ },
+ ["nerv.WindowLayer"] =
+ {
+ wlayer1 = {{window = "window1"}, {dim_in = {429}, dim_out = {429}}},
+ wlayer2 = {{window = "window2"}, {dim_in = {429}, dim_out = {429}}}
+ }
+ }, param_repo, gconf)
+
+layer_repo = nerv.LayerRepo( -- DAG wiring: input -> bias1 -> window1 -> bias2 -> window2 -> output
+ {
+ ["nerv.DAGLayer"] =
+ {
+ main = {{}, {
+ dim_in = {429}, dim_out = {429},
+ sub_layers = sublayer_repo,
+ connections = {
+ ["<input>[1]"] = "blayer1[1]",
+ ["blayer1[1]"] = "wlayer1[1]",
+ ["wlayer1[1]"] = "blayer2[1]",
+ ["blayer2[1]"] = "wlayer2[1]",
+ ["wlayer2[1]"] = "<output>[1]"
+ }
+ }}
+ }
+ }, param_repo, gconf)
+
+feat_repo = nerv.TNetFeatureRepo( -- feature source: HTK-style .scp list + feature config
+ "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
+ "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
+ frm_ext)
+lab_repo = nerv.TNetLabelRepo( -- label source: HTK master label file (unused below; kept for reference)
+ "/slfs1/users/mfy43/swb_ivec/ref.mlf",
+ "map",
+ "/slfs1/users/mfy43/swb_ivec/dict",
+ "*/",
+ "lab")
+feat_utter = feat_repo:cur_utter() -- host-side feature matrix of the current utterance
+
+-- print(feat_utter)
+-- lab_utter = lab_repo:get_utter(feat_repo, feat_utter:nrow() - frm_ext * 2)
+-- print(lab_utter)
+
+cf2 = nerv.ChunkFile("feat_256", "r") -- previously saved reference output, used for comparison below
+input = cf2:read_chunk("input", gconf)
+
+step = frm_ext * 2 + 1 -- spliced window width: left context + center frame + right context
+expanded = nerv.CuMatrixFloat(feat_utter:nrow(), feat_utter:ncol() * step) -- one row per frame, `step` frames wide
+expanded:expand_frm(nerv.CuMatrixFloat.new_from_host(feat_utter), frm_ext) -- splice neighboring frames into each row
+
+rearranged = expanded:create() -- presumably allocates a same-shape matrix -- confirm create() semantics
+rearranged:rearrange_frm(expanded, step) -- reorder spliced columns into TNet's expected layout
+
+output = {expanded:create()} -- DAGLayer expects a table of output matrices
+main = layer_repo:get_layer("main")
+main:init()
+main:propagate({rearranged}, output) -- apply the global transform to the rearranged features
+
+for i = 0, 157 - 10 do -- compare to the reference; offset 5 skips the frm_ext padding rows
+ row_diff = input.trans[i] - output[1][i + 5]
+ for j = 0, row_diff:ncol() - 1 do
+ nerv.printf("%.8f ", row_diff[j]) -- values should be ~0 if preprocessing matches the reference
+ end
+ nerv.printf("\n")
+end
diff --git a/htk_io/examples/tnet_preprocessing_example2.lua b/htk_io/examples/tnet_preprocessing_example2.lua
new file mode 100644
index 0000000..1215b23
--- /dev/null
+++ b/htk_io/examples/tnet_preprocessing_example2.lua
@@ -0,0 +1,68 @@
+require 'speech.init' -- unified speech package entry point (replaces bare 'libspeech')
+gconf = {cumat_type = nerv.CuMatrixFloat, -- global config: CUDA float matrices
+ batch_size = 158}
+param_repo = nerv.ParamRepo({"global_transf.nerv"}) -- pretrained bias/window parameters
+
+sublayer_repo = nerv.LayerRepo( -- leaf layers making up the global feature transform
+ {
+ ["nerv.BiasLayer"] =
+ {
+ blayer1 = {{bias = "bias1"}, {dim_in = {429}, dim_out = {429}}},
+ blayer2 = {{bias = "bias2"}, {dim_in = {429}, dim_out = {429}}}
+ },
+ ["nerv.WindowLayer"] =
+ {
+ wlayer1 = {{window = "window1"}, {dim_in = {429}, dim_out = {429}}},
+ wlayer2 = {{window = "window2"}, {dim_in = {429}, dim_out = {429}}}
+ }
+ }, param_repo, gconf)
+
+layer_repo = nerv.LayerRepo( -- DAG wiring: input -> bias1 -> window1 -> bias2 -> window2 -> output
+ {
+ ["nerv.DAGLayer"] =
+ {
+ main = {{}, {
+ dim_in = {429}, dim_out = {429},
+ sub_layers = sublayer_repo,
+ connections = {
+ ["<input>[1]"] = "blayer1[1]",
+ ["blayer1[1]"] = "wlayer1[1]",
+ ["wlayer1[1]"] = "blayer2[1]",
+ ["blayer2[1]"] = "wlayer2[1]",
+ ["wlayer2[1]"] = "<output>[1]"
+ }
+ }}
+ }
+ }, param_repo, gconf)
+
+reader = nerv.TNetReader({}, -- high-level reader wrapping feature and label repositories
+ {
+ id = "main_scp", -- key under which feature data appears in get_data() results
+ scp_file = "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
+ conf_file = "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
+ frm_ext = 5, -- context frames on each side of a target frame
+ mlfs = {
+ ref = { -- label stream named "ref" (appears as utter.ref below)
+ file = "/slfs1/users/mfy43/swb_ivec/ref.mlf",
+ format = "map",
+ format_arg = "/slfs1/users/mfy43/swb_ivec/dict",
+ dir = "*/",
+ ext = "lab"
+ }
+ },
+ global_transf = layer_repo:get_layer("main") -- transform applied to features inside the reader
+ })
+
+utter = reader:get_data() -- one utterance: features under .main_scp, labels under .ref
+-- print(utter.main_scp)
+print(utter.ref)
+-- cf2 = nerv.ChunkFile("feat_256", "r") -- reference-comparison code kept for debugging
+-- input = cf2:read_chunk("input", gconf)
+
+-- for i = 0, 157 - 10 do
+-- row_diff = input.trans[i] - utter.main_scp[i]
+-- for j = 0, row_diff:ncol() - 1 do
+-- nerv.printf("%.8f ", row_diff[j])
+-- end
+-- nerv.printf("\n")
+-- end
diff --git a/htk_io/examples/tnet_sgd_buffer.lua b/htk_io/examples/tnet_sgd_buffer.lua
new file mode 100644
index 0000000..152d2f5
--- /dev/null
+++ b/htk_io/examples/tnet_sgd_buffer.lua
@@ -0,0 +1,70 @@
+require 'speech.init' -- unified speech package entry point
+gconf = {cumat_type = nerv.CuMatrixFloat, -- device (CUDA) matrix type
+ mmat_type = nerv.MMatrixFloat, -- host (main-memory) matrix type used by the buffer
+ batch_size = 256}
+param_repo = nerv.ParamRepo({"global_transf.nerv"}) -- pretrained bias/window parameters
+
+sublayer_repo = nerv.LayerRepo( -- leaf layers making up the global feature transform
+ {
+ ["nerv.BiasLayer"] =
+ {
+ blayer1 = {{bias = "bias1"}, {dim_in = {429}, dim_out = {429}}},
+ blayer2 = {{bias = "bias2"}, {dim_in = {429}, dim_out = {429}}}
+ },
+ ["nerv.WindowLayer"] =
+ {
+ wlayer1 = {{window = "window1"}, {dim_in = {429}, dim_out = {429}}},
+ wlayer2 = {{window = "window2"}, {dim_in = {429}, dim_out = {429}}}
+ }
+ }, param_repo, gconf)
+
+layer_repo = nerv.LayerRepo( -- DAG wiring: input -> bias1 -> window1 -> bias2 -> window2 -> output
+ {
+ ["nerv.DAGLayer"] =
+ {
+ main = {{}, {
+ dim_in = {429}, dim_out = {429},
+ sub_layers = sublayer_repo,
+ connections = {
+ ["<input>[1]"] = "blayer1[1]",
+ ["blayer1[1]"] = "wlayer1[1]",
+ ["wlayer1[1]"] = "blayer2[1]",
+ ["blayer2[1]"] = "wlayer2[1]",
+ ["wlayer2[1]"] = "<output>[1]"
+ }
+ }}
+ }
+ }, param_repo, gconf)
+
+tnet_reader = nerv.TNetReader({}, -- high-level reader wrapping feature and label repositories
+ {
+ id = "main_scp", -- key under which feature data appears in get_data() results
+-- scp_file = "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
+ scp_file = "t.scp", -- small local list used for this example (full list commented above)
+ conf_file = "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
+ frm_ext = 5, -- context frames on each side of a target frame
+ mlfs = {
+ ref = { -- label stream named "ref" (appears as data.ref below)
+ file = "/slfs1/users/mfy43/swb_ivec/ref.mlf",
+ format = "map",
+ format_arg = "/slfs1/users/mfy43/swb_ivec/dict",
+ dir = "*/",
+ ext = "lab"
+ }
+ },
+ global_transf = layer_repo:get_layer("main") -- transform applied to features inside the reader
+ })
+
+buffer = nerv.SGDBuffer(gconf, -- batches utterances from the reader into fixed-size chunks
+ {
+ buffer_size = 1024, -- frames held in the buffer at once
+ readers = {
+ { reader = tnet_reader,
+ data = {main_scp = 429, ref = 1}} -- stream name -> per-frame width (429 features, 1 label)
+ }
+ })
+
+for data in buffer.get_data, buffer do -- generic-for: calls buffer:get_data() until it returns nil
+ print(data.main_scp)
+-- print(data.ref)
+end