Diffstat (limited to 'tnet_io')
-rw-r--r--   tnet_io/cwrapper.cpp | 15 ++++++++-------
-rw-r--r--   tnet_io/cwrapper.h   |  5 +++--
-rw-r--r--   tnet_io/init.c       |  4 ++--
-rw-r--r--   tnet_io/tnet.mk      |  2 +-
4 files changed, 14 insertions(+), 12 deletions(-)
diff --git a/tnet_io/cwrapper.cpp b/tnet_io/cwrapper.cpp
index df4bdef..e82f3f8 100644
--- a/tnet_io/cwrapper.cpp
+++ b/tnet_io/cwrapper.cpp
@@ -2,6 +2,7 @@
 #include "KaldiLib/Labels.h"
 #include "KaldiLib/Common.h"
 #include "KaldiLib/UserInterface.h"
+#include "../../common.h"
 #include <string>
 
 #define SNAME "TNET"
@@ -9,7 +10,7 @@ extern "C" {
 #include "cwrapper.h"
 #include "string.h"
 
-    extern Matrix *nerv_matrix_host_float_new_(long nrow, long ncol);
+    extern Matrix *nerv_matrix_host_float_new_(lua_State *L, long nrow, long ncol);
 
     struct TNetFeatureRepo {
         TNet::FeatureRepository feature_repo;
@@ -33,7 +34,6 @@ extern "C" {
     TNetFeatureRepo *tnet_feature_repo_new(const char *p_script, const char *config, int context) {
         TNetFeatureRepo *repo = new TNetFeatureRepo();
         repo->ui.ReadConfig(config);
-        fprintf(stderr, "read config\n");
         repo->swap_features = !repo->ui.GetBool(SNAME":NATURALREADORDER", TNet::IsBigEndian());
         /* load defaults */
         repo->target_kind = repo->ui.GetFeatureParams(&repo->deriv_order,
@@ -53,14 +53,14 @@ extern "C" {
         return repo;
     }
 
-    Matrix *tnet_feature_repo_read_utterance(TNetFeatureRepo *repo) {
+    Matrix *tnet_feature_repo_read_utterance(TNetFeatureRepo *repo, lua_State *L) {
         Matrix *mat; /* nerv implementation */
         repo->feature_repo.ReadFullMatrix(repo->feats_host);
         std::string utter_str = repo->feature_repo.Current().Logical();
         repo->feats_host.CheckData(utter_str);
         int n = repo->feats_host.Rows();
         int m = repo->feats_host.Cols();
-        mat = nerv_matrix_host_float_new_(n, m);
+        mat = nerv_matrix_host_float_new_(L, n, m);
         size_t stride = mat->stride;
         fprintf(stderr, "[tnet] feature: %s %d %d\n", utter_str.c_str(), n, m);
         for (int i = 0; i < n; i++)
@@ -68,7 +68,7 @@ extern "C" {
             float *row = repo->feats_host.pRowData(i);
             float *nerv_row = (float *)((char *)mat->data.f + i * stride);
             /* use memmove to copy the row, since KaldiLib uses compact storage */
-            memmove(nerv_row, row, sizeof(float) * m);
+            memmove(nerv_row, row, sizeof(float) * m);
         }
         return mat;
     }
@@ -110,14 +110,15 @@ extern "C" {
     Matrix *tnet_label_repo_read_utterance(TNetLabelRepo *repo,
                                             size_t frames,
                                             size_t sample_rate,
-                                            const char *tag) {
+                                            const char *tag,
+                                            lua_State *L) {
         std::vector<TNet::Matrix<float> > labs_hosts; /* KaldiLib implementation */
         Matrix *mat;
         repo->label_repo.GenDesiredMatrixExt(labs_hosts, frames,
                                             sample_rate, tag);
         int n = labs_hosts[0].Rows();
         int m = labs_hosts[0].Cols();
-        mat = nerv_matrix_host_float_new_(n, m);
+        mat = nerv_matrix_host_float_new_(L, n, m);
         size_t stride = mat->stride;
         fprintf(stderr, "[tnet] label: %s %d %d\n", tag, n, m);
         for (int i = 0; i < n; i++)
diff --git a/tnet_io/cwrapper.h b/tnet_io/cwrapper.h
index 7a9449a..810901b 100644
--- a/tnet_io/cwrapper.h
+++ b/tnet_io/cwrapper.h
@@ -9,7 +9,7 @@ extern "C" {
 
     TNetFeatureRepo *tnet_feature_repo_new(const char *scp,
                                             const char *config, int context);
-    Matrix *tnet_feature_repo_read_utterance(TNetFeatureRepo *repo);
+    Matrix *tnet_feature_repo_read_utterance(TNetFeatureRepo *repo, lua_State *L);
     size_t tnet_feature_repo_current_samplerate(TNetFeatureRepo *repo);
     const char *tnet_feature_repo_current_tag(TNetFeatureRepo *repo);
     void tnet_feature_repo_next(TNetFeatureRepo *repo);
@@ -24,7 +24,8 @@ extern "C" {
     Matrix *tnet_label_repo_read_utterance(TNetLabelRepo *repo,
                                             size_t frames,
                                             size_t sample_rate,
-                                            const char *tag);
+                                            const char *tag,
+                                            lua_State *L);
     void tnet_label_repo_destroy(TNetLabelRepo *repo);
 
 #ifdef __cplusplus
diff --git a/tnet_io/init.c b/tnet_io/init.c
index 7195eb7..3fa7cb8 100644
--- a/tnet_io/init.c
+++ b/tnet_io/init.c
@@ -29,7 +29,7 @@ static int feat_repo_current_tag(lua_State *L) {
 
 static int feat_repo_current_utterance(lua_State *L) {
     TNetFeatureRepo *repo = luaT_checkudata(L, 1, nerv_tnet_feat_repo_tname);
-    Matrix *utter = tnet_feature_repo_read_utterance(repo);
+    Matrix *utter = tnet_feature_repo_read_utterance(repo, L);
     luaT_pushudata(L, utter, nerv_matrix_host_float_tname);
     return 1;
 }
@@ -67,7 +67,7 @@ static int label_repo_read_utterance(lua_State *L) {
     Matrix *utter = tnet_label_repo_read_utterance(repo,
                                                     frames,
                                                     tnet_feature_repo_current_samplerate(feat_repo),
-                                                    tnet_feature_repo_current_tag(feat_repo));
+                                                    tnet_feature_repo_current_tag(feat_repo), L);
     luaT_pushudata(L, utter, nerv_matrix_host_float_tname);
     return 1;
 }
diff --git a/tnet_io/tnet.mk b/tnet_io/tnet.mk
index ba4862b..9f933db 100644
--- a/tnet_io/tnet.mk
+++ b/tnet_io/tnet.mk
@@ -40,7 +40,7 @@ AS = $(CROSS_COMPILE)as
 ##############################################################
 ##### PATH TO CUDA TOOLKIT #####
 #CUDA_TK_BASE=/usr/local/share/cuda-3.2.12
-CUDA_TK_BASE=/usr/local/cuda
+#CUDA_TK_BASE=/usr/local/cuda
 ##### PATH TO CUDA TOOLKIT #####
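
The change above threads the Lua state from the binding layer (init.c) through the C wrappers down to the host-matrix allocator, whose signature becomes nerv_matrix_host_float_new_(lua_State *L, long nrow, long ncol). The sketch below is a hypothetical helper, not part of this commit, illustrating that calling convention together with the stride-aware row copy used in cwrapper.cpp; it assumes only the names visible in the diff (Matrix, mat->stride, mat->data.f, and the updated allocator declaration).

/* Sketch only -- not part of this commit. The lua_State is passed through
 * to the allocator, and each row is copied with memmove because the source
 * buffer is stored compactly while nerv host matrices may pad rows to
 * `stride` bytes. */
#include <string.h>      /* memmove */
#include "cwrapper.h"    /* assumed to make Matrix and lua_State visible */

extern Matrix *nerv_matrix_host_float_new_(lua_State *L, long nrow, long ncol);

/* hypothetical helper: copy an n x m row-major float buffer into a newly
 * allocated nerv host matrix */
static Matrix *copy_rows_to_host_matrix(lua_State *L, const float *src,
                                        int n, int m)
{
    Matrix *mat = nerv_matrix_host_float_new_(L, n, m);
    size_t stride = mat->stride;
    for (int i = 0; i < n; i++)
    {
        float *nerv_row = (float *)((char *)mat->data.f + i * stride);
        memmove(nerv_row, src + (size_t)i * m, sizeof(float) * m);
    }
    return mat;
}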