From 821aec314824b89e9fe9c3ee467793a05ed89ee5 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Tue, 2 Jun 2015 12:50:20 +0800
Subject: modify preprocessing example to use layers

---
 examples/tnet_preprocessing_example.lua | 38 +++++++++++++++++++++++++++------
 1 file changed, 31 insertions(+), 7 deletions(-)

(limited to 'examples')

diff --git a/examples/tnet_preprocessing_example.lua b/examples/tnet_preprocessing_example.lua
index c36463b..4f36aa8 100644
--- a/examples/tnet_preprocessing_example.lua
+++ b/examples/tnet_preprocessing_example.lua
@@ -1,5 +1,7 @@
 require 'libspeech'
 require 'layer.affine'
+require 'layer.bias'
+require 'layer.window'
 frm_ext = 5
 feat_repo = nerv.TNetFeatureRepo(
     "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
@@ -22,7 +24,7 @@ window1 = cf:read_chunk("window1", gconf)
 bias2 = cf:read_chunk("bias2", gconf)
 window2 = cf:read_chunk("window2", gconf)
 
-cf2 = nerv.ChunkFile("input.param", "r")
+cf2 = nerv.ChunkFile("feat_256", "r")
 input = cf2:read_chunk("input", gconf)
 
 step = frm_ext * 2 + 1
@@ -32,13 +34,35 @@ expanded:expand_frm(nerv.CuMatrixFloat.new_from_host(feat_utter), frm_ext)
 rearranged = expanded:create()
 rearranged:rearrange_frm(expanded, step)
 
-rearranged:add_row(bias1.trans, 1)
-rearranged:scale_row(window1.trans)
-rearranged:add_row(bias2.trans, 1)
-rearranged:scale_row(window2.trans)
+output1 = {expanded:create()}
+output2 = {expanded:create()}
+output3 = {expanded:create()}
+output4 = {expanded:create()}
 
-for i = 0, 9 do
-    row_diff = input.trans[i] - rearranged[i + 5]
+blayer1 = nerv.BiasLayer("b1", gconf, {bias = bias1,
+                                       dim_in = {429},
+                                       dim_out = {429}})
+wlayer1 = nerv.WindowLayer("w1", gconf, {window = window1,
+                                         dim_in = {429},
+                                         dim_out = {429}})
+blayer2 = nerv.BiasLayer("b1", gconf, {bias = bias2,
+                                       dim_in = {429},
+                                       dim_out = {429}})
+wlayer2 = nerv.WindowLayer("w1", gconf, {window = window2,
+                                         dim_in = {429},
+                                         dim_out = {429}})
+blayer1:init()
+wlayer1:init()
+blayer2:init()
+wlayer2:init()
+
+blayer1:propagate({rearranged}, output1)
+wlayer1:propagate(output1, output2)
+blayer2:propagate(output2, output3)
+wlayer2:propagate(output3, output4)
+
+for i = 0, 157 - 10 do
+    row_diff = input.trans[i] - output4[1][i + 5]
     for j = 0, row_diff:ncol() - 1 do
         nerv.utils.printf("%.8f ", row_diff[j])
     end
-- 
cgit v1.2.3-70-g09d2
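
The patch replaces the direct in-place matrix calls (add_row / scale_row) with BiasLayer and WindowLayer objects that apply the same per-frame bias and window transforms through propagate(). Below is a minimal sketch of that equivalence, using only calls that appear in this patch; the matrix `mat`, the parameters `bias1`/`window1`, and the config table `gconf` are placeholder names assumed to be set up as in the example (429 is the expanded frame dimension the example uses).

    -- old style (removed above): transform the CuMatrix in place
    mat:add_row(bias1.trans, 1)      -- add the bias row to every frame
    mat:scale_row(window1.trans)     -- scale every frame elementwise by the window row

    -- new style (added above): wrap the same parameters in layers and propagate
    blayer = nerv.BiasLayer("b1", gconf, {bias = bias1, dim_in = {429}, dim_out = {429}})
    wlayer = nerv.WindowLayer("w1", gconf, {window = window1, dim_in = {429}, dim_out = {429}})
    blayer:init()
    wlayer:init()

    out1 = {mat:create()}            -- output buffers shaped like the input
    out2 = {mat:create()}
    blayer:propagate({mat}, out1)    -- out1[1]: mat with the bias row added
    wlayer:propagate(out1, out2)     -- out2[1]: out1[1] scaled by the window row

Chaining two bias/window pairs, as the updated example does, reproduces the effect of the four removed add_row/scale_row calls; the example then checks the result against the reference features read from "feat_256".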