From 3b156cd0cd19aa7db97082161921064a0a871328 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Sat, 30 May 2015 15:27:01 +0800
Subject: add nerv interface for TNet IO

---
 tnet_io/Makefile     |  13 -------
 tnet_io/cwrapper.cpp |   4 +-
 tnet_io/init.c       | 103 +++++++++++++++++++++++++++++++++++++++++++++++++++
 tnet_io/test.c       |  64 ++++++++++++++++----------------
 tnet_io/tnet.mk      |   2 +-
 5 files changed, 137 insertions(+), 49 deletions(-)
 delete mode 100644 tnet_io/Makefile
 create mode 100644 tnet_io/init.c

(limited to 'tnet_io')

diff --git a/tnet_io/Makefile b/tnet_io/Makefile
deleted file mode 100644
index a943886..0000000
--- a/tnet_io/Makefile
+++ /dev/null
@@ -1,13 +0,0 @@
-.PHONY: tnet
-all: tnet $(OBJ_DIR)/test
-$(OBJ_DIR)/test: $(OBJ_DIR)/cwrapper.o $(OBJ_DIR)/test.o $(OBJ_DIR)/libKaldiLib.a
-	g++ -o $@ $^ -Wl,-rpath=$(BUILD_DIR)/objs -L$(BUILD_DIR)/objs -lnerv -Wl,-rpath=$(BUILD_DIR)/luajit-2.0/lib -L../../build/luajit-2.0/lib -lluajit-5.1
-$(OBJ_DIR)/cwrapper.o: cwrapper.cpp
-	g++ -o $@ -c $< -DHAVE_ATLAS -I KaldiLib/ -g
-$(OBJ_DIR)/%.o: %.c
-	g++ -o $@ -c $< -g
-clean:
-	-rm $(OBJ_DIR)/*.o
-	$(MAKE) -C KaldiLib/ clean
-tnet:
-	$(MAKE) -C KaldiLib/
diff --git a/tnet_io/cwrapper.cpp b/tnet_io/cwrapper.cpp
index f992731..df4bdef 100644
--- a/tnet_io/cwrapper.cpp
+++ b/tnet_io/cwrapper.cpp
@@ -91,7 +91,7 @@ extern "C" {
         if (repo->cvn_mask)
             free(repo->cvn_path);
         free(repo->p_deriv_win_lenghts);
-        free(repo);
+        delete repo;
     }
 
     struct TNetLabelRepo {
@@ -131,6 +131,6 @@ extern "C" {
     }
 
     void tnet_label_repo_destroy(TNetLabelRepo *repo) {
-        free(repo);
+        delete repo;
     }
 }
diff --git a/tnet_io/init.c b/tnet_io/init.c
new file mode 100644
index 0000000..7195eb7
--- /dev/null
+++ b/tnet_io/init.c
@@ -0,0 +1,103 @@
+#include "../../common.h"
+#include "cwrapper.h"
+#include <stdio.h>
+
+const char *nerv_tnet_feat_repo_tname = "nerv.TNetFeatureRepo";
+const char *nerv_tnet_label_repo_tname = "nerv.TNetLabelRepo";
+extern const char *nerv_matrix_host_float_tname;
+
+static int feat_repo_new(lua_State *L) {
+    const char *scp_file = luaL_checkstring(L, 1);
+    const char *conf = luaL_checkstring(L, 2);
+    int frm_ext = luaL_checkinteger(L, 3);
+    TNetFeatureRepo *repo = tnet_feature_repo_new(scp_file, conf, frm_ext);
+    luaT_pushudata(L, repo, nerv_tnet_feat_repo_tname);
+    return 1;
+}
+
+static int feat_repo_destroy(lua_State *L) {
+    TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
+    tnet_feature_repo_destroy(repo);
+    return 0;
+}
+
+static int feat_repo_current_tag(lua_State *L) {
+    TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
+    lua_pushstring(L, tnet_feature_repo_current_tag(repo));
+    return 1;
+}
+
+static int feat_repo_current_utterance(lua_State *L) {
+    TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
+    Matrix *utter = tnet_feature_repo_read_utterance(repo);
+    luaT_pushudata(L, utter, nerv_matrix_host_float_tname);
+    return 1;
+}
+
+static int feat_repo_next(lua_State *L) {
+    TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
+    tnet_feature_repo_next(repo);
+    return 0;
+}
+
+static const luaL_Reg feat_repo_methods[] = {
+    {"cur_utter", feat_repo_current_utterance},
+    {"cur_tag", feat_repo_current_tag},
+    {"next", feat_repo_next},
+    {NULL, NULL}
+};
+
+static int label_repo_new(lua_State *L) {
+    const char *mlf_file = luaL_checkstring(L, 1);
+    const char *fmt = luaL_checkstring(L, 2);
+    const char *arg = luaL_checkstring(L, 3);
+    const char *dir = luaL_checkstring(L, 4);
+    const char *ext = luaL_checkstring(L, 5);
+    TNetLabelRepo *repo = tnet_label_repo_new(
+                            mlf_file, fmt, arg,
+                            dir, ext);
+    luaT_pushudata(L, repo, nerv_tnet_label_repo_tname);
+    return 1;
+}
+
+static int label_repo_read_utterance(lua_State *L) {
+    TNetLabelRepo *repo = luaT_checkudata(L, 1, nerv_tnet_label_repo_tname);
+    TNetFeatureRepo *feat_repo = luaT_checkudata(L, 2, nerv_tnet_feat_repo_tname);
+    size_t frames = luaL_checkinteger(L, 3);
+    Matrix *utter = tnet_label_repo_read_utterance(repo,
+                        frames,
+                        tnet_feature_repo_current_samplerate(feat_repo),
+                        tnet_feature_repo_current_tag(feat_repo));
+    luaT_pushudata(L, utter, nerv_matrix_host_float_tname);
+    return 1;
+}
+
+static int label_repo_destroy(lua_State *L) {
+    TNetLabelRepo *repo = luaT_checkudata(L, 1, nerv_tnet_label_repo_tname);
+    tnet_label_repo_destroy(repo);
+    return 0;
+}
+
+static const luaL_Reg label_repo_methods[] = {
+    {"get_utter", label_repo_read_utterance},
+    {NULL, NULL}
+};
+
+static void feat_repo_init(lua_State *L) {
+    luaT_newmetatable(L, nerv_tnet_feat_repo_tname, NULL,
+                        feat_repo_new, feat_repo_destroy, NULL);
+    luaL_register(L, NULL, feat_repo_methods);
+    lua_pop(L, 1);
+}
+
+static void label_repo_init(lua_State *L) {
+    luaT_newmetatable(L, nerv_tnet_label_repo_tname, NULL,
+                        label_repo_new, label_repo_destroy, NULL);
+    luaL_register(L, NULL, label_repo_methods);
+    lua_pop(L, 1);
+}
+
+void tnet_io_init(lua_State *L) {
+    feat_repo_init(L);
+    label_repo_init(L);
+}
diff --git a/tnet_io/test.c b/tnet_io/test.c
index 397b249..a5b8f55 100644
--- a/tnet_io/test.c
+++ b/tnet_io/test.c
@@ -1,41 +1,39 @@
 #include "cwrapper.h"
 #include <stdio.h>
 
-extern "C" {
-    void print_nerv_matrix(Matrix *mat) {
-        int n = mat->nrow;
-        int m = mat->ncol;
-        int i, j;
-        size_t stride = mat->stride;
-        for (i = 0; i < n; i++)
-        {
+void print_nerv_matrix(Matrix *mat) {
+    int n = mat->nrow;
+    int m = mat->ncol;
+    int i, j;
+    size_t stride = mat->stride;
+    for (i = 0; i < n; i++)
+    {
-            float *nerv_row = (float *)((char *)mat->data.f + i * stride);
-            for (j = 0; j < m; j++)
-                fprintf(stderr, "%.8f ", nerv_row[j]);
-            fprintf(stderr, "\n");
-        }
+        float *nerv_row = (float *)((char *)mat->data.f + i * stride);
+        for (j = 0; j < m; j++)
+            fprintf(stderr, "%.8f ", nerv_row[j]);
+        fprintf(stderr, "\n");
     }
+}
 
-    int main() {
-        fprintf(stderr, "init repo\n");
-        TNetFeatureRepo *feat_repo = tnet_feature_repo_new(
-                "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
-                "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf", 5);
-        Matrix *feat_utter;
-        feat_utter = tnet_feature_repo_read_utterance(feat_repo);
+int main() {
+    fprintf(stderr, "init repo\n");
+    TNetFeatureRepo *feat_repo = tnet_feature_repo_new(
+            "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
+            "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf", 5);
+    Matrix *feat_utter;
+    feat_utter = tnet_feature_repo_read_utterance(feat_repo);
 
-        TNetLabelRepo *lab_repo = tnet_label_repo_new(
-                "/slfs1/users/mfy43/swb_ivec/ref.mlf",
-                "map",
-                "/slfs1/users/mfy43/swb_ivec/dict",
-                "*/",
-                "lab");
-        Matrix *lab_utter = tnet_label_repo_read_utterance(lab_repo,
-                feat_utter->nrow - 5 * 2,
-                tnet_feature_repo_current_samplerate(feat_repo),
-                tnet_feature_repo_current_tag(feat_repo));
-        print_nerv_matrix(lab_utter);
-        return 0;
-    }
+    TNetLabelRepo *lab_repo = tnet_label_repo_new(
+            "/slfs1/users/mfy43/swb_ivec/ref.mlf",
+            "map",
+            "/slfs1/users/mfy43/swb_ivec/dict",
+            "*/",
+            "lab");
+    Matrix *lab_utter = tnet_label_repo_read_utterance(lab_repo,
+            feat_utter->nrow - 5 * 2,
+            tnet_feature_repo_current_samplerate(feat_repo),
+            tnet_feature_repo_current_tag(feat_repo));
+    print_nerv_matrix(lab_utter);
+    return 0;
 }
diff --git a/tnet_io/tnet.mk b/tnet_io/tnet.mk
index 775fe85..ba4862b 100644
--- a/tnet_io/tnet.mk
+++ b/tnet_io/tnet.mk
@@ -47,7 +47,7 @@ CUDA_TK_BASE=/usr/local/cuda
 
 # compilation args
-CXXFLAGS += -g -Wall -O2 -DHAVE_ATLAS -rdynamic
+CXXFLAGS += -g -Wall -O2 -DHAVE_ATLAS -rdynamic -fPIC
 CXXFLAGS += -Wshadow -Wpointer-arith -Wcast-qual -Wcast-align -Wwrite-strings -Wconversion
 
 # enable double-precision
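
Usage sketch (not part of the patch above): the metatables registered in tnet_io/init.c are meant to be driven from Lua once tnet_io_init() has been wired into interpreter start-up. The snippet below mirrors test.c from the Lua side; it assumes luaT exposes the constructors as nerv.TNetFeatureRepo / nerv.TNetLabelRepo and that the returned host float matrix provides an :nrow() accessor -- both assumptions, since neither is shown in this commit. Paths and frm_ext = 5 are the example values from test.c.

    -- hypothetical Lua round trip through the new bindings (mirrors test.c)
    local frm_ext = 5
    local feat_repo = nerv.TNetFeatureRepo(
        "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
        "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
        frm_ext)
    local lab_repo = nerv.TNetLabelRepo(
        "/slfs1/users/mfy43/swb_ivec/ref.mlf",
        "map",
        "/slfs1/users/mfy43/swb_ivec/dict",
        "*/",
        "lab")

    local feat_utter = feat_repo:cur_utter()     -- host float matrix for the current utterance
    local lab_utter = lab_repo:get_utter(feat_repo,
                        feat_utter:nrow() - frm_ext * 2)  -- assumes an :nrow() accessor; trims the frame extension
    print(feat_repo:cur_tag())                   -- tag of the utterance just read
    feat_repo:next()                             -- advance to the next entry in the scp list

get_utter takes the feature repo as its first argument because the C side needs the current tag and sample period (tnet_feature_repo_current_tag / tnet_feature_repo_current_samplerate) to look up the matching labels for that utterance.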