author     Determinant <ted.sybil@gmail.com>  2016-03-11 20:09:06 +0800
committer  Determinant <ted.sybil@gmail.com>  2016-03-11 20:09:06 +0800
commit     54b33aa3a95f5a7a023e9ea453094ae081c91f64 (patch)
tree       7d170d45566aad6803c4949cf4ec144b584b580d
parent     046d6d30b88462407c4acde3d7c47e94ef083b94 (diff)
adapt to new network architecture
-rw-r--r--  htk_io/tools/nerv_to_tnet.lua    |  5
-rw-r--r--  htk_io/tools/tnet_to_nerv.cpp    | 49
-rwxr-xr-x  kaldi_io/tools/kaldi_to_nerv     | bin 18719 -> 34935 bytes
-rw-r--r--  kaldi_io/tools/nerv_to_kaldi.lua |  1
-rw-r--r--  speech_utils/init.lua            | 11
5 files changed, 46 insertions, 20 deletions
diff --git a/htk_io/tools/nerv_to_tnet.lua b/htk_io/tools/nerv_to_tnet.lua
index c0ac76b..17ff3db 100644
--- a/htk_io/tools/nerv_to_tnet.lua
+++ b/htk_io/tools/nerv_to_tnet.lua
@@ -1,8 +1,9 @@
--- usage: nerv config_file nerv_param_input tnet_output
+-- usage: nerv nerv_to_tnet.lua config_file nerv_param_input tnet_param_output
dofile(arg[1])
+gconf.mmat_type = nerv.MMatrixFloat
param_repo = nerv.ParamRepo()
-param_repo:import({arg[2], gconf.initialized_param[2]}, nil, gconf)
+param_repo:import({arg[2]}, nil, gconf)
layer_repo = make_layer_repo(param_repo)
f = assert(io.open(arg[3], "w"))
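Note: per the updated usage comment, the script no longer pulls a second parameter file out of gconf.initialized_param and now fixes its host matrix type itself before importing. A hypothetical invocation under the new convention (file names are placeholders, not from this commit):

    nerv nerv_to_tnet.lua config.lua final.nerv final.tnet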
diff --git a/htk_io/tools/tnet_to_nerv.cpp b/htk_io/tools/tnet_to_nerv.cpp
index a779a25..63a104d 100644
--- a/htk_io/tools/tnet_to_nerv.cpp
+++ b/htk_io/tools/tnet_to_nerv.cpp
@@ -2,34 +2,54 @@
#include <fstream>
#include <string>
#include <cstring>
+#include <cassert>
#include <cstdlib>
+
char token[1024];
char output[1024];
-double **mat;
+
+double **new_matrix(int nrow, int ncol) {
+ double **mat = new double *[nrow];
+ int i;
+ for (i = 0; i < nrow; i++)
+ mat[i] = new double[ncol];
+ return mat;
+}
+
+void free_matrix(double **mat, int nrow, int ncol) {
+ int i;
+ for (i = 0; i < nrow; i++)
+ delete [] mat[i];
+ delete [] mat;
+}
+
int main(int argc, char **argv) {
+ FILE *fin;
std::ofstream fout;
- fout.open(argv[1]);
- int cnt = 0;
- while (scanf("%s", token) != EOF)
+ assert(argc >= 3);
+ fin = fopen(argv[1], "r");
+ fout.open(argv[2]);
+ assert(fin != NULL);
+ int cnt = argc > 3 ? atoi(argv[3]) : 0;
+ while (fscanf(fin, "%s", token) != EOF)
{
int nrow, ncol;
int i, j;
+ double **mat;
if (strcmp(token, "<biasedlinearity>") == 0)
{
- scanf("%d %d", &ncol, &nrow);
- scanf("%s %d %d", token, &ncol, &nrow);
+ fscanf(fin, "%d %d", &ncol, &nrow);
+ fscanf(fin, "%s %d %d", token, &ncol, &nrow);
printf("%d %d\n", nrow, ncol);
- mat = (double **)malloc(nrow * sizeof(double *));
- for (i = 0; i < nrow; i++)
- mat[i] = (double *)malloc(ncol * sizeof(double));
+ mat = new_matrix(nrow, ncol);
for (j = 0; j < ncol; j++)
for (i = 0; i < nrow; i++)
- scanf("%lf", mat[i] + j);
+ fscanf(fin, "%lf", mat[i] + j);
long base = fout.tellp();
sprintf(output, "%16d", 0);
fout << output;
sprintf(output, "{type=\"nerv.LinearTransParam\",id=\"affine%d_ltp\"}\n",
- cnt);
+ cnt);
fout << output;
sprintf(output, "%d %d\n", nrow, ncol);
fout << output;
@@ -38,19 +58,17 @@ int main(int argc, char **argv) {
for (j = 0; j < ncol; j++)
fout << mat[i][j] << " ";
fout << std::endl;
- free(mat[i]);
}
- free(mat);
long length = fout.tellp() - base;
fout.seekp(base);
sprintf(output, "[%13lu]\n", length);
fout << output;
fout.seekp(0, std::ios_base::end);
- if (scanf("%s %d", token, &ncol) == 2 && *token == 'v')
+ if (fscanf(fin, "%s %d", token, &ncol) == 2 && *token == 'v')
{
base = fout.tellp();
for (j = 0; j < ncol; j++)
- scanf("%lf", mat[0] + j);
+ fscanf(fin, "%lf", mat[0] + j);
sprintf(output, "%16d", 0);
fout << output;
sprintf(output, "{type=\"nerv.BiasParam\",id=\"affine%d_bp\"}\n",
@@ -68,6 +86,7 @@ int main(int argc, char **argv) {
fout.seekp(0, std::ios_base::end);
cnt++;
}
+ free_matrix(mat, nrow, ncol);
}
}
return 0;
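Note: with this change the converter reads the TNet model from the file named by argv[1] instead of stdin, writes the NERV chunk file to argv[2], and optionally seeds the affine-layer counter from argv[3] (defaulting to 0), so converted layers can be numbered after existing ones. A hypothetical invocation (file names are placeholders, not from this commit):

    ./tnet_to_nerv model.tnet model.nerv 0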
diff --git a/kaldi_io/tools/kaldi_to_nerv b/kaldi_io/tools/kaldi_to_nerv
index 78469f8..08a3d76 100755
--- a/kaldi_io/tools/kaldi_to_nerv
+++ b/kaldi_io/tools/kaldi_to_nerv
Binary files differ
diff --git a/kaldi_io/tools/nerv_to_kaldi.lua b/kaldi_io/tools/nerv_to_kaldi.lua
index fba6a6c..0f0cb2a 100644
--- a/kaldi_io/tools/nerv_to_kaldi.lua
+++ b/kaldi_io/tools/nerv_to_kaldi.lua
@@ -1,6 +1,7 @@
-- usage: nerv nerv_to_kaldi.lua config_file nerv_param_input kaldi_param_output
dofile(arg[1])
+gconf.mmat_type = nerv.MMatrixFloat
param_repo = nerv.ParamRepo()
param_repo:import({arg[2]}, nil, gconf)
layer_repo = make_layer_repo(param_repo)
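Note: as with nerv_to_tnet.lua above, the script now sets gconf.mmat_type itself rather than relying on the config file to do so. A hypothetical invocation matching the usage comment (file names are placeholders, not from this commit):

    nerv nerv_to_kaldi.lua config.lua final.nerv final.nnet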
diff --git a/speech_utils/init.lua b/speech_utils/init.lua
index 9e8adba..0b65edc 100644
--- a/speech_utils/init.lua
+++ b/speech_utils/init.lua
@@ -1,13 +1,18 @@
nerv.speech_utils = {}
-function nerv.speech_utils.global_transf(feat_utter, global_transf,
+function nerv.speech_utils.global_transf(feat_utter, network,
frm_ext, frm_trim, gconf)
-- prepare for transf
local input = {feat_utter}
local output = {feat_utter:create()}
-- do transf
- global_transf:init(input[1]:nrow())
- global_transf:propagate(input, output)
+ local batch_size = input[1]:nrow()
+ network:mini_batch_init({seq_length = table.vector(batch_size, 1),
+ new_seq = {},
+ do_train = false,
+ input = {input},
+ output = {output}})
+ network:propagate()
-- trim frames
if gconf.use_cpu then
mat_type = gconf.mmat_type
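Note: the heart of the adaptation is that global_transf now receives a full network object and drives it through the mini_batch_init/propagate protocol instead of per-layer init/propagate calls. A minimal sketch of the new calling pattern as used above, assuming `net` is such a network and `utter` a feature matrix (both names are placeholders, not from this commit):

    -- hedged sketch of the mini_batch_init protocol introduced by this commit
    local input  = {utter}
    local output = {utter:create()}        -- same-shaped destination matrix
    net:mini_batch_init({
        seq_length = table.vector(utter:nrow(), 1), -- each row treated as a length-1 sequence
        new_seq    = {},                            -- no carried-over sequence state
        do_train   = false,                         -- inference only, no gradients
        input      = {input},
        output     = {output},
    })
    net:propagate()                        -- fills output[1] in place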