path: root/examples/tnet_preprocessing_example.lua
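-- Preprocessing example: replicate TNet's front-end (context expansion,
-- frame rearrangement, global bias/window transform) with NERV CUDA
-- matrices and compare the result against a reference dump.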
require 'libspeech'
require 'layer.affine'
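-- number of context frames taken on each side of the current frame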
frm_ext = 5
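-- feature repository: reads the utterances listed in the .scp file using
-- the given PLP feature config, with frm_ext frames of padding at the
-- utterance boundaries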
feat_repo = nerv.TNetFeatureRepo(
                                "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
                                "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
                                frm_ext)
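-- label repository: reads frame-level reference labels from the MLF
-- through the given dictionary ("*/" and "lab" follow the usual HTK label
-- directory/extension convention)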
lab_repo = nerv.TNetLabelRepo(
                                "/slfs1/users/mfy43/swb_ivec/ref.mlf",
                                "map",
                                "/slfs1/users/mfy43/swb_ivec/dict",
                                "*/",
                                "lab")
feat_utter = feat_repo:cur_utter()
-- print(feat_utter)
-- lab_utter = lab_repo:get_utter(feat_repo, feat_utter:nrow() - frm_ext * 2)
-- print(lab_utter)
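-- load the global transform parameters (two bias/window pairs) from a
-- NERV chunk file; mat_type makes read_chunk load them as CuMatrixFloat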
gconf = {mat_type = nerv.CuMatrixFloat}
cf = nerv.ChunkFile("global_transf.nerv", "r")
bias1 = cf:read_chunk("bias1", gconf)
window1 = cf:read_chunk("window1", gconf)
bias2 = cf:read_chunk("bias2", gconf)
window2 = cf:read_chunk("window2", gconf)

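-- reference preprocessed features (dumped beforehand, e.g. by TNet), used
-- for verification at the end of this script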
cf2 = nerv.ChunkFile("input.param", "r")
input = cf2:read_chunk("input", gconf)

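-- splice each frame with frm_ext frames of left and right context:
-- every output row holds step = 2 * frm_ext + 1 consecutive frames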
step = frm_ext * 2 + 1
expanded = nerv.CuMatrixFloat(feat_utter:nrow(), feat_utter:ncol() * step)
expanded:expand_frm(nerv.CuMatrixFloat.new_from_host(feat_utter), frm_ext)

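-- rearrange the spliced features into TNet's ordering, grouping the same
-- feature dimension across the context window together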
rearranged = expanded:create()
rearranged:rearrange_frm(expanded, step)

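-- apply the global transform: shift every row by the bias vector, then
-- scale it element-wise by the window vector (two passes)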
rearranged:add_row(bias1.trans, 1)
rearranged:scale_row(window1.trans)
rearranged:add_row(bias2.trans, 1)
rearranged:scale_row(window2.trans)

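-- compare the first 10 frames against the reference; rows are offset by
-- frm_ext to skip the padded border frames, so the printed differences
-- should be (close to) zero if the preprocessing matches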
for i = 0, 9 do
    row_diff = input.trans[i] - rearranged[i + frm_ext]
    for j = 0, row_diff:ncol() - 1 do
        nerv.utils.printf("%.8f ", row_diff[j])
    end
    nerv.utils.printf("\n")
end