author     Determinant <ted.sybil@gmail.com>  2015-06-02 20:28:04 +0800
committer  Determinant <ted.sybil@gmail.com>  2015-06-02 20:28:04 +0800
commit     0c6ca6a17f06821cd5d612f489ca6cb68c2c4d5b (patch)
tree       54866316793838518fc5f55ea561b8c0b461bd6e
parent     821aec314824b89e9fe9c3ee467793a05ed89ee5 (diff)
...
-rw-r--r--  examples/tnet_preprocessing_example.lua  77
1 file changed, 41 insertions, 36 deletions
diff --git a/examples/tnet_preprocessing_example.lua b/examples/tnet_preprocessing_example.lua
index 4f36aa8..8a65b44 100644
--- a/examples/tnet_preprocessing_example.lua
+++ b/examples/tnet_preprocessing_example.lua
@@ -1,8 +1,40 @@
require 'libspeech'
-require 'layer.affine'
-require 'layer.bias'
-require 'layer.window'
frm_ext = 5
+gconf = {mat_type = nerv.CuMatrixFloat,
+         batch_size = 158}
+param_repo = nerv.ParamRepo({"global_transf.nerv"})
+sublayer_repo = nerv.LayerRepo(
+    {
+        ["nerv.BiasLayer"] =
+        {
+            blayer1 = {{bias = "bias1"}, {dim_in = {429}, dim_out = {429}}},
+            blayer2 = {{bias = "bias2"}, {dim_in = {429}, dim_out = {429}}}
+        },
+        ["nerv.WindowLayer"] =
+        {
+            wlayer1 = {{window = "window1"}, {dim_in = {429}, dim_out = {429}}},
+            wlayer2 = {{window = "window2"}, {dim_in = {429}, dim_out = {429}}}
+        }
+    }, param_repo, gconf)
+
+layer_repo = nerv.LayerRepo(
+    {
+        ["nerv.DAGLayer"] =
+        {
+            main = {{}, {
+                dim_in = {429}, dim_out = {429},
+                sub_layers = sublayer_repo,
+                connections = {
+                    ["<input>[1]"] = "blayer1[1]",
+                    ["blayer1[1]"] = "wlayer1[1]",
+                    ["wlayer1[1]"] = "blayer2[1]",
+                    ["blayer2[1]"] = "wlayer2[1]",
+                    ["wlayer2[1]"] = "<output>[1]"
+                }
+            }}
+        }
+    }, param_repo, gconf)
+
feat_repo = nerv.TNetFeatureRepo(
"/slfs1/users/mfy43/swb_ivec/train_bp.scp",
"/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
@@ -14,15 +46,10 @@ lab_repo = nerv.TNetLabelRepo(
"*/",
"lab")
feat_utter = feat_repo:cur_utter()
+
-- print(feat_utter)
-- lab_utter = lab_repo:get_utter(feat_repo, feat_utter:nrow() - frm_ext * 2)
-- print(lab_utter)
-gconf = {mat_type = nerv.CuMatrixFloat}
-cf = nerv.ChunkFile("global_transf.nerv", "r")
-bias1 = cf:read_chunk("bias1", gconf)
-window1 = cf:read_chunk("window1", gconf)
-bias2 = cf:read_chunk("bias2", gconf)
-window2 = cf:read_chunk("window2", gconf)
cf2 = nerv.ChunkFile("feat_256", "r")
input = cf2:read_chunk("input", gconf)
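
The removed lines above show the pattern this commit retires: each parameter
chunk was read from global_transf.nerv by hand and passed to a layer
constructor as a Lua value. With the new code, ParamRepo loads the file once
and the LayerRepo entries refer to parameters by chunk name. A condensed
before/after sketch, using only calls that appear in this patch:

    -- Before: one explicit read per parameter chunk.
    cf = nerv.ChunkFile("global_transf.nerv", "r")
    bias1 = cf:read_chunk("bias1", gconf)

    -- After: the repo loads the whole file up front; layers name the
    -- chunk ("bias1") instead of holding the value.
    param_repo = nerv.ParamRepo({"global_transf.nerv"})

Note that ChunkFile itself stays in use for the reference data: the "input"
chunk from feat_256 is still read directly, since it is test input rather
than a layer parameter.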
@@ -34,35 +61,13 @@ expanded:expand_frm(nerv.CuMatrixFloat.new_from_host(feat_utter), frm_ext)
rearranged = expanded:create()
rearranged:rearrange_frm(expanded, step)
-output1 = {expanded:create()}
-output2 = {expanded:create()}
-output3 = {expanded:create()}
-output4 = {expanded:create()}
-
-blayer1 = nerv.BiasLayer("b1", gconf, {bias = bias1,
-                                       dim_in = {429},
-                                       dim_out = {429}})
-wlayer1 = nerv.WindowLayer("w1", gconf, {window = window1,
-                                         dim_in = {429},
-                                         dim_out = {429}})
-blayer2 = nerv.BiasLayer("b1", gconf, {bias = bias2,
-                                       dim_in = {429},
-                                       dim_out = {429}})
-wlayer2 = nerv.WindowLayer("w1", gconf, {window = window2,
-                                         dim_in = {429},
-                                         dim_out = {429}})
-blayer1:init()
-wlayer1:init()
-blayer2:init()
-wlayer2:init()
-
-blayer1:propagate({rearranged}, output1)
-wlayer1:propagate(output1, output2)
-blayer2:propagate(output2, output3)
-wlayer2:propagate(output3, output4)
+output = {expanded:create()}
+main = layer_repo:get_layer("main")
+main:init()
+main:propagate({rearranged}, output)
for i = 0, 157 - 10 do
-    row_diff = input.trans[i] - output4[1][i + 5]
+    row_diff = input.trans[i] - output[1][i + 5]
    for j = 0, row_diff:ncol() - 1 do
        nerv.utils.printf("%.8f ", row_diff[j])
    end
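
With the DAG declared, the last hunk collapses four init/propagate pairs and
their three intermediate buffers into a single call on the composite layer;
the check loop is unchanged apart from reading output instead of output4. A
condensed sketch of the new forward pass, assembled from the added lines (the
bound 157 - 10 together with the i + 5 offset skips the frm_ext = 5 context
frames at each end of the 158-row batch, leaving 148 comparable rows):

    output = {expanded:create()}
    main = layer_repo:get_layer("main")   -- fetch the DAGLayer by id
    main:init()                           -- replaces the four per-layer :init() calls
    main:propagate({rearranged}, output)  -- one pass through the whole chain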