author     Determinant <ted.sybil@gmail.com>  2015-05-30 15:27:01 +0800
committer  Determinant <ted.sybil@gmail.com>  2015-05-30 15:27:01 +0800
commit     3b156cd0cd19aa7db97082161921064a0a871328 (patch)
tree       f58a2f25edbc5b3495756370fae6c33aab4f54c9
parent     54546414d1700ed34bf2fe896f36e86e59850b63 (diff)
add nerv interface for TNet IO
-rw-r--r--  .gitignore                      4
-rw-r--r--  Makefile                       15
-rw-r--r--  examples/tnet_io_example.lua   16
-rw-r--r--  init.c                          8
-rw-r--r--  tnet_io/Makefile               13
-rw-r--r--  tnet_io/cwrapper.cpp            4
-rw-r--r--  tnet_io/init.c                103
-rw-r--r--  tnet_io/test.c                 64
-rw-r--r--  tnet_io/tnet.mk                 2
9 files changed, 180 insertions, 49 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..d8e6051
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+*.o
+build/
+*.swp
+*.swo
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..086527a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,15 @@
+.PHONY: tnet
+all: tnet $(OBJ_DIR)/tnet_io/test $(LIB_DIR)/libspeech.so
+$(LIB_DIR)/libspeech.so: $(OBJ_DIR)/tnet_io/cwrapper.o $(OBJ_DIR)/init.o $(OBJ_DIR)/tnet_io/init.o $(OBJ_DIR)/tnet_io/libKaldiLib.a
+ gcc -shared -o $@ $(OBJ_DIR)/tnet_io/cwrapper.o $(OBJ_DIR)/init.o $(OBJ_DIR)/tnet_io/libKaldiLib.a $(OBJ_DIR)/tnet_io/init.o -lstdc++ -Wl,-rpath=$(LIB_DIR) -L$(LIB_DIR) -lnerv
+$(OBJ_DIR)/tnet_io/test: $(OBJ_DIR)/tnet_io/cwrapper.o $(OBJ_DIR)/tnet_io/test.o $(OBJ_DIR)/tnet_io/libKaldiLib.a
+ gcc -o $@ $^ -Wl,-rpath=$(LIB_DIR) -L$(LIB_DIR) -lnerv -Wl,-rpath=$(BUILD_DIR)/luajit-2.0/lib -L$(BUILD_DIR)/luajit-2.0/lib -lluajit-5.1 -lstdc++ -lm
+$(OBJ_DIR)/tnet_io/cwrapper.o: tnet_io/cwrapper.cpp
+ g++ -o $@ -c $< -DHAVE_ATLAS -I tnet_io/KaldiLib/ -g -fPIC
+$(OBJ_DIR)/%.o: %.c
+ gcc -o $@ -c $< -g -I $(BUILD_DIR)/luajit-2.0/include/luajit-2.0 -fPIC
+clean:
+ -rm $(OBJ_DIR)/tnet_io/*.o
+ $(MAKE) -C tnet_io/KaldiLib/ clean
+tnet:
+ $(MAKE) -C tnet_io/KaldiLib/ OBJ_DIR=$(OBJ_DIR)/tnet_io
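Note that OBJ_DIR, LIB_DIR and BUILD_DIR are not defined in this Makefile; presumably they are passed in by NERV's top-level build, which also provides the luajit-2.0 headers and the libnerv library that the link lines reference.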
diff --git a/examples/tnet_io_example.lua b/examples/tnet_io_example.lua
new file mode 100644
index 0000000..eea73a5
--- /dev/null
+++ b/examples/tnet_io_example.lua
@@ -0,0 +1,16 @@
+require 'libspeech'
+frm_ext = 5
+feat_repo = nerv.TNetFeatureRepo(
+ "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
+ "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
+ frm_ext)
+lab_repo = nerv.TNetLabelRepo(
+ "/slfs1/users/mfy43/swb_ivec/ref.mlf",
+ "map",
+ "/slfs1/users/mfy43/swb_ivec/dict",
+ "*/",
+ "lab")
+feat_utter = feat_repo:cur_utter()
+print(feat_utter)
+lab_utter = lab_repo:get_utter(feat_repo, feat_utter:nrow() - frm_ext * 2)
+print(lab_utter)
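The example reads the feature matrix of the current utterance and then fetches its frame labels; the frm_ext * 2 subtraction presumably drops the frm_ext context frames padded at each end of the feature matrix, so the label matrix covers only the usable frames.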
diff --git a/init.c b/init.c
new file mode 100644
index 0000000..0cf476d
--- /dev/null
+++ b/init.c
@@ -0,0 +1,8 @@
+#include "../common.h"
+#include <stdio.h>
+
+extern void tnet_io_init(lua_State *L);
+int luaopen_libspeech(lua_State *L) {
+ tnet_io_init(L);
+ return 1;
+}
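init.c follows the standard Lua C-module convention: require 'libspeech' loads libspeech.so from package.cpath and calls luaopen_libspeech, which runs tnet_io_init to register the TNet IO types. A minimal loading sketch in Lua (the cpath entry is an assumption about where the build places the library):

    -- minimal loading sketch; the cpath entry is an assumption about
    -- where the build drops libspeech.so
    package.cpath = package.cpath .. ";build/lib/?.so"
    require 'libspeech'          -- dlopens libspeech.so, calls luaopen_libspeech
    print(nerv.TNetFeatureRepo)  -- constructor registered by tnet_io_init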
diff --git a/tnet_io/Makefile b/tnet_io/Makefile
deleted file mode 100644
index a943886..0000000
--- a/tnet_io/Makefile
+++ /dev/null
@@ -1,13 +0,0 @@
-.PHONY: tnet
-all: tnet $(OBJ_DIR)/test
-$(OBJ_DIR)/test: $(OBJ_DIR)/cwrapper.o $(OBJ_DIR)/test.o $(OBJ_DIR)/libKaldiLib.a
- g++ -o $@ $^ -Wl,-rpath=$(BUILD_DIR)/objs -L$(BUILD_DIR)/objs -lnerv -Wl,-rpath=$(BUILD_DIR)/luajit-2.0/lib -L../../build/luajit-2.0/lib -lluajit-5.1
-$(OBJ_DIR)/cwrapper.o: cwrapper.cpp
- g++ -o $@ -c $< -DHAVE_ATLAS -I KaldiLib/ -g
-$(OBJ_DIR)/%.o: %.c
- g++ -o $@ -c $< -g
-clean:
- -rm $(OBJ_DIR)/*.o
- $(MAKE) -C KaldiLib/ clean
-tnet:
- $(MAKE) -C KaldiLib/
diff --git a/tnet_io/cwrapper.cpp b/tnet_io/cwrapper.cpp
index f992731..df4bdef 100644
--- a/tnet_io/cwrapper.cpp
+++ b/tnet_io/cwrapper.cpp
@@ -91,7 +91,7 @@ extern "C" {
if (repo->cvn_mask)
free(repo->cvn_path);
free(repo->p_deriv_win_lenghts);
- free(repo);
+ delete repo;
}
struct TNetLabelRepo {
@@ -131,6 +131,6 @@ extern "C" {
}
void tnet_label_repo_destroy(TNetLabelRepo *repo) {
- free(repo);
+ delete repo;
}
}
diff --git a/tnet_io/init.c b/tnet_io/init.c
new file mode 100644
index 0000000..7195eb7
--- /dev/null
+++ b/tnet_io/init.c
@@ -0,0 +1,103 @@
+#include "../../common.h"
+#include "cwrapper.h"
+#include <stdio.h>
+
+const char *nerv_tnet_feat_repo_tname = "nerv.TNetFeatureRepo";
+const char *nerv_tnet_label_repo_tname = "nerv.TNetLabelRepo";
+extern const char *nerv_matrix_host_float_tname;
+
+static int feat_repo_new(lua_State *L) {
+ const char *scp_file = luaL_checkstring(L, 1);
+ const char *conf = luaL_checkstring(L, 2);
+ int frm_ext = luaL_checkinteger(L, 3);
+ TNetFeatureRepo *repo = tnet_feature_repo_new(scp_file, conf, frm_ext);
+ luaT_pushudata(L, repo, nerv_tnet_feat_repo_tname);
+ return 1;
+}
+
+static int feat_repo_destroy(lua_State *L) {
+ TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
+ tnet_feature_repo_destroy(repo);
+ return 0;
+}
+
+static int feat_repo_current_tag(lua_State *L) {
+ TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
+ lua_pushstring(L, tnet_feature_repo_current_tag(repo));
+ return 1;
+}
+
+static int feat_repo_current_utterance(lua_State *L) {
+ TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
+ Matrix *utter = tnet_feature_repo_read_utterance(repo);
+ luaT_pushudata(L, utter, nerv_matrix_host_float_tname);
+ return 1;
+}
+
+static int feat_repo_next(lua_State *L) {
+ TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
+ tnet_feature_repo_next(repo);
+ return 0;
+}
+
+static const luaL_Reg feat_repo_methods[] = {
+ {"cur_utter", feat_repo_current_utterance},
+ {"cur_tag", feat_repo_current_tag},
+ {"next", feat_repo_next},
+ {NULL, NULL}
+};
+
+static int label_repo_new(lua_State *L) {
+ const char *mlf_file = luaL_checkstring(L, 1);
+ const char *fmt = luaL_checkstring(L, 2);
+ const char *arg = luaL_checkstring(L, 3);
+ const char *dir = luaL_checkstring(L, 4);
+ const char *ext = luaL_checkstring(L, 5);
+ TNetLabelRepo *repo = tnet_label_repo_new(
+ mlf_file, fmt, arg,
+ dir, ext);
+ luaT_pushudata(L, repo, nerv_tnet_label_repo_tname);
+ return 1;
+}
+
+static int label_repo_read_utterance(lua_State *L) {
+ TNetLabelRepo *repo = luaT_checkudata(L, 1, nerv_tnet_label_repo_tname);
+ TNetFeatureRepo *feat_repo = luaT_checkudata(L, 2, nerv_tnet_feat_repo_tname);
+ size_t frames = luaL_checkinteger(L, 3);
+ Matrix *utter = tnet_label_repo_read_utterance(repo,
+ frames,
+ tnet_feature_repo_current_samplerate(feat_repo),
+ tnet_feature_repo_current_tag(feat_repo));
+ luaT_pushudata(L, utter, nerv_matrix_host_float_tname);
+ return 1;
+}
+
+static int label_repo_destroy(lua_State *L) {
+ TNetLabelRepo *repo = luaT_checkudata(L, 1, nerv_tnet_label_repo_tname);
+ tnet_label_repo_destroy(repo);
+ return 0;
+}
+
+static const luaL_Reg label_repo_methods[] = {
+ {"get_utter", label_repo_read_utterance},
+ {NULL, NULL}
+};
+
+static void feat_repo_init(lua_State *L) {
+ luaT_newmetatable(L, nerv_tnet_feat_repo_tname, NULL,
+ feat_repo_new, feat_repo_destroy, NULL);
+ luaL_register(L, NULL, feat_repo_methods);
+ lua_pop(L, 1);
+}
+
+static void label_repo_init(lua_State *L) {
+ luaT_newmetatable(L, nerv_tnet_label_repo_tname, NULL,
+ label_repo_new, label_repo_destroy, NULL);
+ luaL_register(L, NULL, label_repo_methods);
+ lua_pop(L, 1);
+}
+
+void tnet_io_init(lua_State *L) {
+ feat_repo_init(L);
+ label_repo_init(L);
+}
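The methods registered above (cur_utter, cur_tag and next on nerv.TNetFeatureRepo, get_utter on nerv.TNetLabelRepo) suggest a simple read loop from Lua. A hedged sketch, assuming valid scp/conf/mlf/dict paths; the fixed loop count is only illustrative, since this commit does not expose an end-of-scp check to Lua:

    -- sketch of a read loop over the methods registered in tnet_io/init.c;
    -- scp_file, conf_file, mlf_file and dict_file are placeholder paths
    require 'libspeech'
    local frm_ext = 5
    local feat_repo = nerv.TNetFeatureRepo(scp_file, conf_file, frm_ext)
    local lab_repo  = nerv.TNetLabelRepo(mlf_file, "map", dict_file, "*/", "lab")
    for i = 1, 10 do
        local feat = feat_repo:cur_utter()
        local lab  = lab_repo:get_utter(feat_repo, feat:nrow() - frm_ext * 2)
        print(feat_repo:cur_tag(), feat:nrow(), lab:nrow())
        feat_repo:next()
    end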
diff --git a/tnet_io/test.c b/tnet_io/test.c
index 397b249..a5b8f55 100644
--- a/tnet_io/test.c
+++ b/tnet_io/test.c
@@ -1,41 +1,39 @@
#include "cwrapper.h"
#include <stdio.h>
-extern "C" {
- void print_nerv_matrix(Matrix *mat) {
- int n = mat->nrow;
- int m = mat->ncol;
- int i, j;
- size_t stride = mat->stride;
- for (i = 0; i < n; i++)
- {
+void print_nerv_matrix(Matrix *mat) {
+ int n = mat->nrow;
+ int m = mat->ncol;
+ int i, j;
+ size_t stride = mat->stride;
+ for (i = 0; i < n; i++)
+ {
- float *nerv_row = (float *)((char *)mat->data.f + i * stride);
- for (j = 0; j < m; j++)
- fprintf(stderr, "%.8f ", nerv_row[j]);
- fprintf(stderr, "\n");
- }
+ float *nerv_row = (float *)((char *)mat->data.f + i * stride);
+ for (j = 0; j < m; j++)
+ fprintf(stderr, "%.8f ", nerv_row[j]);
+ fprintf(stderr, "\n");
}
+}
- int main() {
- fprintf(stderr, "init repo\n");
- TNetFeatureRepo *feat_repo = tnet_feature_repo_new(
- "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
- "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf", 5);
- Matrix *feat_utter;
- feat_utter = tnet_feature_repo_read_utterance(feat_repo);
+int main() {
+ fprintf(stderr, "init repo\n");
+ TNetFeatureRepo *feat_repo = tnet_feature_repo_new(
+ "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
+ "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf", 5);
+ Matrix *feat_utter;
+ feat_utter = tnet_feature_repo_read_utterance(feat_repo);
- TNetLabelRepo *lab_repo = tnet_label_repo_new(
- "/slfs1/users/mfy43/swb_ivec/ref.mlf",
- "map",
- "/slfs1/users/mfy43/swb_ivec/dict",
- "*/",
- "lab");
- Matrix *lab_utter = tnet_label_repo_read_utterance(lab_repo,
- feat_utter->nrow - 5 * 2,
- tnet_feature_repo_current_samplerate(feat_repo),
- tnet_feature_repo_current_tag(feat_repo));
- print_nerv_matrix(lab_utter);
- return 0;
- }
+ TNetLabelRepo *lab_repo = tnet_label_repo_new(
+ "/slfs1/users/mfy43/swb_ivec/ref.mlf",
+ "map",
+ "/slfs1/users/mfy43/swb_ivec/dict",
+ "*/",
+ "lab");
+ Matrix *lab_utter = tnet_label_repo_read_utterance(lab_repo,
+ feat_utter->nrow - 5 * 2,
+ tnet_feature_repo_current_samplerate(feat_repo),
+ tnet_feature_repo_current_tag(feat_repo));
+ print_nerv_matrix(lab_utter);
+ return 0;
}
diff --git a/tnet_io/tnet.mk b/tnet_io/tnet.mk
index 775fe85..ba4862b 100644
--- a/tnet_io/tnet.mk
+++ b/tnet_io/tnet.mk
@@ -47,7 +47,7 @@ CUDA_TK_BASE=/usr/local/cuda
# compilation args
-CXXFLAGS += -g -Wall -O2 -DHAVE_ATLAS -rdynamic
+CXXFLAGS += -g -Wall -O2 -DHAVE_ATLAS -rdynamic -fPIC
CXXFLAGS += -Wshadow -Wpointer-arith -Wcast-qual -Wcast-align -Wwrite-strings -Wconversion
# enable double-precision