summaryrefslogtreecommitdiff
path: root/htk_io/src/cwrapper.cpp
blob: 66cde23aa2aab3806d528c5f6f5a6d353974c0f5 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
#include "KaldiLib/Features.h"
#include "KaldiLib/Labels.h"
#include "KaldiLib/Common.h"
#include "KaldiLib/UserInterface.h"
#include <string>
#define SNAME "TNET"

extern "C" {
#include "cwrapper.h"
#include "string.h"
#include "pthread.h"
#include "nerv/common.h"

    extern Matrix *nerv_matrix_host_float_create(long nrow, long ncol, Status *status);

    /* C-visible handle around TNet's FeatureRepository plus the feature
     * post-processing configuration read from the TNET config section.
     * Allocated by tnet_feature_repo_new(), shared via
     * tnet_feature_repo_newWithId(), released by tnet_feature_repo_destroy(). */
    struct TNetFeatureRepo {
        TNet::FeatureRepository feature_repo;   /* underlying utterance reader */
        TNet::UserInterface ui;                 /* parsed config file */
        bool swap_features;        /* byte-swap features on read (endianness) */
        int target_kind;           /* HTK parameter kind requested */
        int deriv_order;           /* number of delta orders to append */
        int* p_deriv_win_lenghts;  /* per-order delta window lengths; freed in destroy */
        int start_frm_ext;         /* frames of left context */
        int end_frm_ext;           /* frames of right context */
        char* cmn_path;            /* cepstral-mean-norm dir; freed iff cmn_mask set */
        char* cmn_file;
        const char* cmn_mask;
        char* cvn_path;            /* cepstral-var-norm dir; freed iff cvn_mask set */
        char* cvn_file;
        const char* cvn_mask;
        const char* cvg_file;      /* global variance file */
        TNet::Matrix<float> feats_host; /* KaldiLib implementation */
	int  refcount; /* shared-owner count; decremented by destroy, freed at 1->0 */
    };

    /* Create a feature repository handle.
     *
     * p_script: HTK-style script (.scp) file listing the feature files.
     * config:   TNet config file; feature parameters are read from the
     *           "TNET:" section.
     * context:  frame context; overrides both the start and end frame
     *           extensions from the config.
     *
     * Returns a heap-allocated handle that must be released with
     * tnet_feature_repo_destroy(). */
    TNetFeatureRepo *tnet_feature_repo_new(const char *p_script, const char *config, int context) {
        TNetFeatureRepo *repo = new TNetFeatureRepo();
        /* The creator holds the initial reference.  Without this, refcount
         * stays at its value-initialized 0 and destroy()'s
         * fetch_and_add(..., -1) == 1 test never passes, leaking every
         * repo that was not shared through tnet_feature_repo_newWithId(). */
        repo->refcount = 1;
        repo->ui.ReadConfig(config);
        /* honour the config's NATURALREADORDER flag relative to host endianness */
        repo->swap_features = !repo->ui.GetBool(SNAME":NATURALREADORDER", TNet::IsBigEndian());
        /* load defaults */
        repo->target_kind = repo->ui.GetFeatureParams(&repo->deriv_order,
                &repo->p_deriv_win_lenghts,
                &repo->start_frm_ext, &repo->end_frm_ext,
                &repo->cmn_path, &repo->cmn_file, &repo->cmn_mask,
                &repo->cvn_path, &repo->cvn_file, &repo->cvn_mask,
                &repo->cvg_file, SNAME":", 0);
        /* caller-supplied context wins over the config file's extensions */
        repo->start_frm_ext = repo->end_frm_ext = context;
        repo->feature_repo.Init(repo->swap_features,
                repo->start_frm_ext, repo->end_frm_ext, repo->target_kind,
                repo->deriv_order, repo->p_deriv_win_lenghts,
                repo->cmn_path, repo->cmn_mask,
                repo->cvn_path, repo->cvn_mask, repo->cvg_file);
        repo->feature_repo.AddFileList(p_script);
        repo->feature_repo.Rewind();
        return repo;
    }

    /* Reconstitute a repo pointer from an opaque integer handle (as produced
     * by tnet_feature_repo_id) and take an additional reference on it.
     * The reference must later be dropped with tnet_feature_repo_destroy(). */
    TNetFeatureRepo *tnet_feature_repo_newWithId(long id)
    {
        TNetFeatureRepo *shared = reinterpret_cast<TNetFeatureRepo *>(id);
        __sync_fetch_and_add(&shared->refcount, 1);
        return shared;
    }

    /* Expose the repo pointer as an integral handle so scripting layers can
     * pass it around by value.
     * NOTE(review): assumes sizeof(long) >= sizeof(void*) (true on LP64,
     * not on Windows LLP64) — confirm target platforms. */
    long tnet_feature_repo_id(TNetFeatureRepo *repo)
    {
        return reinterpret_cast<long>(repo);
    }

    /* Read the current utterance into a newly created nerv host matrix.
     *
     * The features are first loaded into the repo's KaldiLib host matrix,
     * then copied row by row into the nerv matrix, whose rows may be
     * stride-padded.  Raises a Lua error via NERV_LUA_CHECK_STATUS if the
     * matrix allocation fails.  debug != 0 logs the utterance tag and
     * dimensions to stderr. */
    Matrix *tnet_feature_repo_read_utterance(TNetFeatureRepo *repo, lua_State *L, int debug) {
        repo->feature_repo.ReadFullMatrix(repo->feats_host);
        std::string tag = repo->feature_repo.Current().Logical();
        repo->feats_host.CheckData(tag);
        int nrows = repo->feats_host.Rows();
        int ncols = repo->feats_host.Cols();
        Status status;
        Matrix *out = nerv_matrix_host_float_create(nrows, ncols, &status); /* nerv implementation */
        NERV_LUA_CHECK_STATUS(L, status);
        size_t row_stride = out->stride;
        if (debug)
            fprintf(stderr, "[tnet] feature: %s %d %d\n", tag.c_str(), nrows, ncols);
        for (int r = 0; r < nrows; r++)
        {
            float *src = repo->feats_host.pRowData(r);
            float *dst = (float *)((char *)out->data.f + r * row_stride);
            /* KaldiLib rows are compact; copy one row at a time into the
             * (possibly padded) nerv layout */
            memmove(dst, src, sizeof(float) * ncols);
        }
        return out;
    }

    /* Advance the repository to the next utterance in the script list. */
    void tnet_feature_repo_next(TNetFeatureRepo *repo) {
        repo->feature_repo.MoveNext();
    }

    /* Non-zero once the utterance list has been exhausted. */
    int tnet_feature_repo_is_end(TNetFeatureRepo *repo) {
        return repo->feature_repo.EndOfList();
    }

    /* Sample period of the current utterance, taken from its HTK header.
     * NOTE(review): despite the function name this returns mSamplePeriod,
     * which HTK stores in 100 ns units — confirm callers expect a period,
     * not a rate. */
    size_t tnet_feature_repo_current_samplerate(TNetFeatureRepo *repo) {
        size_t sample_period = repo->feature_repo.CurrentHeader().mSamplePeriod;
        return sample_period;
    }

    /* Logical name (tag) of the utterance the repository currently points at.
     * NOTE(review): the returned pointer aliases an internal std::string's
     * buffer; it presumably stays valid until the next MoveNext(), and would
     * dangle immediately if Logical() returns by value — verify against
     * FeatureRepository's interface. Callers should copy if they keep it. */
    const char *tnet_feature_repo_current_tag(TNetFeatureRepo *repo) {
        return repo->feature_repo.Current().Logical().c_str();
    }

    /* Drop one reference to the repo; the last owner frees it.
     *
     * NULL is accepted and ignored.  The cmn/cvn path buffers are freed
     * only when the corresponding mask was configured, matching how
     * GetFeatureParams() allocated them in tnet_feature_repo_new(). */
    void tnet_feature_repo_destroy(TNetFeatureRepo *repo) {
        if (repo == NULL)
            return;
        /* atomic decrement; only the thread that takes the count 1 -> 0
         * performs the cleanup */
        if (__sync_fetch_and_add(&repo->refcount, -1) == 1)
        {
            if (repo->cmn_mask)
                free(repo->cmn_path);
            if (repo->cvn_mask)
                free(repo->cvn_path);
            free(repo->p_deriv_win_lenghts);
            delete repo;
            /* note: assigning NULL to the by-value parameter here would be
             * dead code; callers must not reuse their pointer */
        }
    }

    /* C-visible handle around TNet's LabelRepository.  Unlike
     * TNetFeatureRepo this carries no reference count: destroy() frees it
     * unconditionally. */
    struct TNetLabelRepo {
        TNet::LabelRepository label_repo; /* underlying label/MLF reader */
    };

    /* Create a label repository handle.
     *
     * mlf:     master label file path.
     * fmt:     label format selector, with fmt_arg as its argument.
     * dir/ext: directory and extension used to resolve label files.
     *
     * Returns a heap-allocated handle; release with tnet_label_repo_destroy(). */
    TNetLabelRepo *tnet_label_repo_new(const char *mlf, const char *fmt,
                                        const char *fmt_arg, const char *dir,
                                        const char *ext) {
        TNetLabelRepo *wrapper = new TNetLabelRepo();
        /* InitExt is the extended initializer taking an explicit format;
         * the plain Init(mlf, fmt_arg, dir, ext) variant is not used */
        wrapper->label_repo.InitExt(mlf, fmt, fmt_arg, dir, ext);
        return wrapper;
    }

    /* Reconstitute a label-repo pointer from its opaque integer handle.
     * Unlike the feature-repo variant, no reference is taken — label repos
     * are not reference counted. */
    TNetLabelRepo *tnet_label_repo_newWithId(long id)
    {
        TNetLabelRepo *handle = reinterpret_cast<TNetLabelRepo *>(id);
        return handle;
    }

    /* Expose the label-repo pointer as an integral handle.
     * NOTE(review): assumes sizeof(long) >= sizeof(void*) — see
     * tnet_feature_repo_id. */
    long tnet_label_repo_id(TNetLabelRepo *repo)
    {
        return reinterpret_cast<long>(repo);
    }

    /* Build the desired-output (label) matrix for one utterance and copy it
     * into a newly created nerv host matrix.
     *
     * frames/sample_rate/tag identify the utterance and its length;
     * GenDesiredMatrixExt fills a vector of KaldiLib matrices of which only
     * the first is used.  Raises a Lua error via NERV_LUA_CHECK_STATUS on
     * allocation failure.  debug != 0 logs the tag and dimensions. */
    Matrix *tnet_label_repo_read_utterance(TNetLabelRepo *repo,
                                            size_t frames,
                                            size_t sample_rate,
                                            const char *tag,
                                            lua_State *L,
                                            int debug) {
        std::vector<TNet::Matrix<float> >  desired; /* KaldiLib implementation */
        repo->label_repo.GenDesiredMatrixExt(desired, frames,
                                            sample_rate, tag);
        int nrows = desired[0].Rows();
        int ncols = desired[0].Cols();
        Status status;
        Matrix *out = nerv_matrix_host_float_create(nrows, ncols, &status);
        NERV_LUA_CHECK_STATUS(L, status);
        size_t row_stride = out->stride;
        if (debug)
            fprintf(stderr, "[tnet] label: %s %d %d\n", tag, nrows, ncols);
        for (int r = 0; r < nrows; r++)
        {
            float *src = desired[0].pRowData(r);
            float *dst = (float *)((char *)out->data.f + r * row_stride);
            /* KaldiLib rows are compact; copy into the stride-aligned
             * nerv layout one row at a time */
            memmove(dst, src, sizeof(float) * ncols);
        }
        return out;
    }

    /* Free a label repository handle.  NULL is accepted: deleting a null
     * pointer is a well-defined no-op, so no explicit check is needed (the
     * former NULL-check and the dead `repo = NULL;` on the by-value
     * parameter were removed). */
    void tnet_label_repo_destroy(TNetLabelRepo *repo) {
        delete repo;
    }
}