Diffstat (limited to 'tnet_io/cwrapper.cpp')
-rw-r--r--	tnet_io/cwrapper.cpp	15
1 file changed, 8 insertions, 7 deletions
diff --git a/tnet_io/cwrapper.cpp b/tnet_io/cwrapper.cpp
index df4bdef..e82f3f8 100644
--- a/tnet_io/cwrapper.cpp
+++ b/tnet_io/cwrapper.cpp
@@ -2,6 +2,7 @@
#include "KaldiLib/Labels.h"
#include "KaldiLib/Common.h"
#include "KaldiLib/UserInterface.h"
+#include "../../common.h"
#include <string>
#define SNAME "TNET"
@@ -9,7 +10,7 @@ extern "C" {
#include "cwrapper.h"
#include "string.h"
- extern Matrix *nerv_matrix_host_float_new_(long nrow, long ncol);
+ extern Matrix *nerv_matrix_host_float_new_(lua_State *L, long nrow, long ncol);
struct TNetFeatureRepo {
TNet::FeatureRepository feature_repo;
@@ -33,7 +34,6 @@ extern "C" {
TNetFeatureRepo *tnet_feature_repo_new(const char *p_script, const char *config, int context) {
TNetFeatureRepo *repo = new TNetFeatureRepo();
repo->ui.ReadConfig(config);
- fprintf(stderr, "read config\n");
repo->swap_features = !repo->ui.GetBool(SNAME":NATURALREADORDER", TNet::IsBigEndian());
/* load defaults */
repo->target_kind = repo->ui.GetFeatureParams(&repo->deriv_order,
@@ -53,14 +53,14 @@ extern "C" {
return repo;
}
- Matrix *tnet_feature_repo_read_utterance(TNetFeatureRepo *repo) {
+ Matrix *tnet_feature_repo_read_utterance(TNetFeatureRepo *repo, lua_State *L) {
Matrix *mat; /* nerv implementation */
repo->feature_repo.ReadFullMatrix(repo->feats_host);
std::string utter_str = repo->feature_repo.Current().Logical();
repo->feats_host.CheckData(utter_str);
int n = repo->feats_host.Rows();
int m = repo->feats_host.Cols();
- mat = nerv_matrix_host_float_new_(n, m);
+ mat = nerv_matrix_host_float_new_(L, n, m);
size_t stride = mat->stride;
fprintf(stderr, "[tnet] feature: %s %d %d\n", utter_str.c_str(), n, m);
for (int i = 0; i < n; i++)
@@ -68,7 +68,7 @@ extern "C" {
float *row = repo->feats_host.pRowData(i);
float *nerv_row = (float *)((char *)mat->data.f + i * stride);
/* use memmove to copy the row, since KaldiLib uses compact storage */
- memmove(nerv_row, row, sizeof(float) * m);
+ memmove(nerv_row, row, sizeof(float) * m);
}
return mat;
}
@@ -110,14 +110,15 @@ extern "C" {
Matrix *tnet_label_repo_read_utterance(TNetLabelRepo *repo,
size_t frames,
size_t sample_rate,
- const char *tag) {
+ const char *tag,
+ lua_State *L) {
std::vector<TNet::Matrix<float> > labs_hosts; /* KaldiLib implementation */
Matrix *mat;
repo->label_repo.GenDesiredMatrixExt(labs_hosts, frames,
sample_rate, tag);
int n = labs_hosts[0].Rows();
int m = labs_hosts[0].Cols();
- mat = nerv_matrix_host_float_new_(n, m);
+ mat = nerv_matrix_host_float_new_(L, n, m);
size_t stride = mat->stride;
fprintf(stderr, "[tnet] label: %s %d %d\n", tag, n, m);
for (int i = 0; i < n; i++)
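
Caller sketch (not part of this commit): the change threads a lua_State through the wrapper API so that nerv_matrix_host_float_new_() can tie the returned Matrix to the Lua interpreter instead of a global state. The snippet below shows how a NERV Lua binding might now call tnet_feature_repo_read_utterance(); the binding name, the light-userdata argument convention, and the return behaviour are assumptions, only the cwrapper.h signatures visible in the diff above are taken as given.

#include <lua.h>
#include "cwrapper.h"   /* assumed to declare TNetFeatureRepo, Matrix and the
                           tnet_* functions whose signatures change in this commit */

/* Hypothetical Lua binding: the repo handle is assumed to arrive as a light
 * userdata argument; the real NERV bindings may wrap it differently. */
static int feat_repo_read_utterance(lua_State *L)
{
    TNetFeatureRepo *repo = (TNetFeatureRepo *)lua_touserdata(L, 1);
    /* After this commit, L must be forwarded so the matrix constructor can
     * allocate the result through the interpreter rather than a global state. */
    Matrix *mat = tnet_feature_repo_read_utterance(repo, L);
    (void)mat;
    return 1;   /* assuming nerv_matrix_host_float_new_() leaves the new
                   matrix object on the Lua stack for the caller */
}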