From 0c6ca6a17f06821cd5d612f489ca6cb68c2c4d5b Mon Sep 17 00:00:00 2001
From: Determinant <ted.sybil@gmail.com>
Date: Tue, 2 Jun 2015 20:28:04 +0800
Subject: ...

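Rework the TNet preprocessing example: load the global transform
parameters through a ParamRepo and build the bias/window chain as a
DAGLayer via LayerRepo, instead of reading each chunk from
global_transf.nerv by hand and propagating through the four layers one
by one. The frame expansion/rearrangement and the final comparison
against the reference features are unchanged.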
---
 examples/tnet_preprocessing_example.lua | 77 ++++++++++++++++++---------------
 1 file changed, 41 insertions(+), 36 deletions(-)

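Note: the DAGLayer "main" replaces the old per-layer calls; its
`connections` table encodes the same bias -> window -> bias -> window
chain the old code stepped through explicitly. Roughly, in terms of the
old example's variables (a sketch of the equivalent data flow, not the
actual DAGLayer internals):

    -- what main:propagate({rearranged}, output) amounts to:
    blayer1:propagate({rearranged}, output1)  -- "<input>[1]"  -> "blayer1[1]"
    wlayer1:propagate(output1, output2)       -- "blayer1[1]"  -> "wlayer1[1]"
    blayer2:propagate(output2, output3)       -- "wlayer1[1]"  -> "blayer2[1]"
    wlayer2:propagate(output3, output)        -- "blayer2[1]"  -> "wlayer2[1]" -> "<output>[1]"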

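Note on the check at the end: with frm_ext = 5 and a 158-frame batch,
only the middle 158 - 2 * 5 = 148 rows of the expanded utterance carry
a full +/-5-frame context, which is why the loop runs over
i = 0 .. 157 - 10 and compares input.trans[i] against output[1][i + 5].
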
diff --git a/examples/tnet_preprocessing_example.lua b/examples/tnet_preprocessing_example.lua
index 4f36aa8..8a65b44 100644
--- a/examples/tnet_preprocessing_example.lua
+++ b/examples/tnet_preprocessing_example.lua
@@ -1,8 +1,40 @@
 require 'libspeech'
-require 'layer.affine'
-require 'layer.bias'
-require 'layer.window'
 frm_ext = 5
+gconf = {mat_type = nerv.CuMatrixFloat,
+        batch_size = 158}
+param_repo = nerv.ParamRepo({"global_transf.nerv"})
+sublayer_repo = nerv.LayerRepo(
+    {
+        ["nerv.BiasLayer"] =
+        {
+            blayer1 = {{bias = "bias1"}, {dim_in = {429}, dim_out = {429}}},
+            blayer2 = {{bias = "bias2"}, {dim_in = {429}, dim_out = {429}}}
+        },
+        ["nerv.WindowLayer"] =
+        {
+            wlayer1 = {{window = "window1"}, {dim_in = {429}, dim_out = {429}}},
+            wlayer2 = {{window = "window2"}, {dim_in = {429}, dim_out = {429}}}
+        }
+    }, param_repo, gconf)
+
+layer_repo = nerv.LayerRepo(
+    {
+        ["nerv.DAGLayer"] =
+        {
+            main = {{}, {
+                dim_in = {429}, dim_out = {429},
+                sub_layers = sublayer_repo,
+                connections = {
+                    ["<input>[1]"] = "blayer1[1]",
+                    ["blayer1[1]"] = "wlayer1[1]",
+                    ["wlayer1[1]"] = "blayer2[1]",
+                    ["blayer2[1]"] = "wlayer2[1]",
+                    ["wlayer2[1]"] = "<output>[1]"
+                }
+            }}
+        }
+    }, param_repo, gconf)
+
 feat_repo = nerv.TNetFeatureRepo(
                                 "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
                                 "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
@@ -14,15 +46,10 @@ lab_repo = nerv.TNetLabelRepo(
                                 "*/",
                                 "lab")
 feat_utter = feat_repo:cur_utter()
+
 -- print(feat_utter)
 -- lab_utter = lab_repo:get_utter(feat_repo, feat_utter:nrow() - frm_ext * 2)
 -- print(lab_utter)
-gconf = {mat_type = nerv.CuMatrixFloat}
-cf = nerv.ChunkFile("global_transf.nerv", "r")
-bias1 = cf:read_chunk("bias1", gconf)
-window1 = cf:read_chunk("window1", gconf)
-bias2 = cf:read_chunk("bias2", gconf)
-window2 = cf:read_chunk("window2", gconf)
 
 cf2 = nerv.ChunkFile("feat_256", "r")
 input = cf2:read_chunk("input", gconf)
@@ -34,35 +61,13 @@ expanded:expand_frm(nerv.CuMatrixFloat.new_from_host(feat_utter), frm_ext)
 rearranged = expanded:create()
 rearranged:rearrange_frm(expanded, step)
 
-output1 = {expanded:create()}
-output2 = {expanded:create()}
-output3 = {expanded:create()}
-output4 = {expanded:create()}
-
-blayer1 = nerv.BiasLayer("b1", gconf, {bias = bias1,
-                                dim_in = {429},
-                                dim_out = {429}})
-wlayer1 = nerv.WindowLayer("w1", gconf, {window = window1,
-                                dim_in = {429},
-                                dim_out = {429}})
-blayer2 = nerv.BiasLayer("b1", gconf, {bias = bias2,
-                                dim_in = {429},
-                                dim_out = {429}})
-wlayer2 = nerv.WindowLayer("w1", gconf, {window = window2,
-                                dim_in = {429},
-                                dim_out = {429}})
-blayer1:init()
-wlayer1:init()
-blayer2:init()
-wlayer2:init()
-
-blayer1:propagate({rearranged}, output1)
-wlayer1:propagate(output1, output2)
-blayer2:propagate(output2, output3)
-wlayer2:propagate(output3, output4)
+output = {expanded:create()}
+main = layer_repo:get_layer("main")
+main:init()
+main:propagate({rearranged}, output)
 
 for i = 0, 157 - 10 do
-    row_diff = input.trans[i] - output4[1][i + 5]
+    row_diff = input.trans[i] - output[1][i + 5]
     for j = 0, row_diff:ncol() - 1 do
         nerv.utils.printf("%.8f ", row_diff[j])
     end