summaryrefslogtreecommitdiff
path: root/init.lua
blob: 1f200961208f2eabf78901243fcc1d3b1b132db7 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
require 'libspeech'
-- TNet-format data reader: subclass of nerv.DataReader that serves feature
-- matrices (with frame context expansion and a global transform applied)
-- plus aligned label utterances read from MLF files.
local TNetReader = nerv.class("nerv.TNetReader", "nerv.DataReader")

--- Construct a TNet reader from a configuration table.
-- @param global_conf global configuration (unused here, kept for interface parity)
-- @param reader_conf table with fields: id, frm_ext, scp_file, conf_file,
--        mlfs (map of id -> {file, format, format_arg, dir, ext}), global_transf
function TNetReader:__init(global_conf, reader_conf)
    -- key under which the feature matrix is stored in get_data() results
    self.feat_id = reader_conf.id
    -- number of context frames on each side of the current frame
    self.frm_ext = reader_conf.frm_ext
    -- feature repository backed by a TNet scp list + feature config
    self.feat_repo = nerv.TNetFeatureRepo(reader_conf.scp_file,
                                          reader_conf.conf_file,
                                          reader_conf.frm_ext)
    -- one label repository per configured MLF spec
    local label_repos = {}
    for name, spec in pairs(reader_conf.mlfs) do
        label_repos[name] = nerv.TNetLabelRepo(spec.file,
                                               spec.format,
                                               spec.format_arg,
                                               spec.dir,
                                               spec.ext)
    end
    self.lab_repo = label_repos
    -- network applied to every utterance's features in get_data()
    self.global_transf = reader_conf.global_transf
end

--- Read the current utterance, apply context expansion and the global
-- transform, and return a result table mapping feat_id -> feature matrix
-- and each label-repo id -> its label utterance. Advances the feature
-- repository to the next utterance before returning.
function TNetReader:get_data()
    local ctx = self.frm_ext
    local win = ctx * 2 + 1  -- window width: current frame plus ctx on each side
    local utter = self.feat_repo:cur_utter()
    -- stack neighbouring frames into each row (presumably +/- ctx frames of
    -- context per row -- semantics live in expand_frm; confirm in libspeech)
    local stacked = nerv.CuMatrixFloat(utter:nrow(), utter:ncol() * win)
    stacked:expand_frm(nerv.CuMatrixFloat.new_from_host(utter), ctx)
    local reordered = stacked:create()
    reordered:rearrange_frm(stacked, win)
    -- push the expanded features through the global transform network
    local t_in = {reordered}
    local t_out = {reordered:create()}
    self.global_transf:init()
    self.global_transf:propagate(t_in, t_out)
    -- drop the ctx padding rows at each end: keep rows [ctx, nrow - ctx)
    local trimmed = nerv.CuMatrixFloat(t_out[1]:nrow() - ctx * 2, t_out[1]:ncol())
    trimmed:copy_fromd(t_out[1], ctx, utter:nrow() - ctx)
    local res = {}
    res[self.feat_id] = trimmed
    -- fetch labels aligned to the trimmed feature row count
    for name, repo in pairs(self.lab_repo) do
        res[name] = repo:get_utter(self.feat_repo, trimmed:nrow())
    end
    self.feat_repo:next()
    return res
end