author     Yimmon Zhuang <[email protected]>  2015-08-14 17:40:18 +0800
committer  Yimmon Zhuang <[email protected]>  2015-08-14 17:40:18 +0800
commit     dfdd17afc2e984ec6c32ea01290f5c76309a456a (patch)
tree       bfea20e97c200cf734021e3756d749c892e658a4 /htk_io/tools
parent     70d52a3dc6c120fe76e1109e844303e2f5e61872 (diff)
parent     10cce5f6a5c9e2f8e00d5a2a4d87c9cb7c26bf4c (diff)
solve dependencies
Diffstat (limited to 'htk_io/tools')
-rw-r--r--  htk_io/tools/nerv_to_tnet.lua  60
-rw-r--r--  htk_io/tools/tnet_to_nerv.c     2
-rw-r--r--  htk_io/tools/tnet_to_nerv.cpp   8
3 files changed, 68 insertions(+), 2 deletions(-)
diff --git a/htk_io/tools/nerv_to_tnet.lua b/htk_io/tools/nerv_to_tnet.lua
new file mode 100644
index 0000000..c0ac76b
--- /dev/null
+++ b/htk_io/tools/nerv_to_tnet.lua
@@ -0,0 +1,60 @@
+-- usage: nerv config_file nerv_param_input tnet_output
+
+dofile(arg[1])
+param_repo = nerv.ParamRepo()
+param_repo:import({arg[2], gconf.initialized_param[2]}, nil, gconf)
+layer_repo = make_layer_repo(param_repo)
+f = assert(io.open(arg[3], "w"))
+
+function print_tnet_matrix(cumat)
+ local strs = {}
+ collectgarbage()
+ if cumat:nrow() == 1 then
+ local mat = nerv.MMatrixFloat(1, cumat:ncol())
+ cumat:copy_toh(mat)
+ table.insert(strs, string.format("v %d\n", mat:ncol()))
+ for j = 0, mat:ncol() - 1 do
+ table.insert(strs, string.format("%.8f ", mat[0][j]))
+ end
+ table.insert(strs, "\n")
+ f:write(table.concat(strs))
+ else
+ cumat = cumat:trans()
+ local mat = nerv.MMatrixFloat(cumat:nrow(), cumat:ncol())
+ cumat:copy_toh(mat)
+ table.insert(strs, string.format("m %d %d\n", mat:nrow(), mat:ncol()))
+ for i = 0, mat:nrow() - 1 do
+ local row = mat[i]
+ for j = 0, mat:ncol() - 1 do
+ table.insert(strs, string.format("%.8f ", row[j]))
+ end
+ table.insert(strs, "\n")
+ f:write(table.concat(strs))
+ strs = {}
+ end
+ end
+end
+local lnames = {"affine0", "sigmoid0",
+ "affine1", "sigmoid1",
+ "affine2", "sigmoid2",
+ "affine3", "sigmoid3",
+ "affine4", "sigmoid4",
+ "affine5", "sigmoid5",
+ "affine6", "sigmoid6",
+ "affine7", "ce_crit"}
+for i, name in ipairs(lnames) do
+ local layer = layer_repo:get_layer(name)
+ local layer_type = layer.__typename
+ if layer_type == "nerv.AffineLayer" then
+ f:write(string.format("<biasedlinearity> %d %d\n", layer.dim_out[1], layer.dim_in[1]))
+ print_tnet_matrix(layer.ltp.trans)
+ print_tnet_matrix(layer.bp.trans)
+ elseif layer_type == "nerv.SigmoidLayer" then
+ f:write(string.format("<sigmoid> %d %d\n", layer.dim_out[1], layer.dim_in[1]))
+ elseif layer_type == "nerv.SoftmaxCELayer" then
+ f:write(string.format("<softmax> %d %d\n", layer.dim_in[1], layer.dim_in[1]))
+ else
+ nerv.error("unknown layer type %s", layer_type)
+ end
+end
+f:close()
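
The new Lua script above writes each layer as a tagged header ("<biasedlinearity>", "<sigmoid>", "<softmax>") followed by the layer's parameters: print_tnet_matrix() emits a transposed weight matrix as "m <rows> <cols>" with one line of %.8f values per row, and a bias vector as "v <n>" with its values on a single line. As a rough illustration only (not part of this commit), a minimal C++ sketch that reads back one such "m"/"v" block could look like the following; the function name read_tnet_block and the error handling are assumptions, only the record layout comes from the script above.

    // Hypothetical sketch: parse one "v <n>" or "m <rows> <cols>" block
    // in the layout emitted by print_tnet_matrix() above.
    #include <cstdio>
    #include <vector>

    // Returns the values in row-major order; rows/cols are filled from the header.
    static std::vector<double> read_tnet_block(std::FILE *f, int &rows, int &cols) {
        char tag = 0;
        if (std::fscanf(f, " %c", &tag) != 1) return {};
        if (tag == 'v') {                          // vector: "v <n>"
            rows = 1;
            if (std::fscanf(f, "%d", &cols) != 1) return {};
        } else {                                   // matrix: "m <rows> <cols>"
            if (std::fscanf(f, "%d %d", &rows, &cols) != 2) return {};
        }
        std::vector<double> data((std::size_t)rows * cols);
        for (double &x : data)
            if (std::fscanf(f, "%lf", &x) != 1) return {};
        return data;
    }
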
diff --git a/htk_io/tools/tnet_to_nerv.c b/htk_io/tools/tnet_to_nerv.c
index f781236..5774819 100644
--- a/htk_io/tools/tnet_to_nerv.c
+++ b/htk_io/tools/tnet_to_nerv.c
@@ -41,7 +41,7 @@ int main() {
fprintf(fout, "%16d", 0);
fprintf(fout, "{type=\"nerv.BiasParam\",id=\"affine%d_bp\"}\n",
cnt);
- fprintf(fout, "1 %d\n", nrow, ncol);
+ fprintf(fout, "1 %d\n", ncol);
for (j = 0; j < ncol; j++)
fprintf(fout, "%.8f ", mat[0][j]);
fprintf(fout, "\n");
diff --git a/htk_io/tools/tnet_to_nerv.cpp b/htk_io/tools/tnet_to_nerv.cpp
index bbfddcf..a779a25 100644
--- a/htk_io/tools/tnet_to_nerv.cpp
+++ b/htk_io/tools/tnet_to_nerv.cpp
@@ -2,9 +2,10 @@
#include <fstream>
#include <string>
#include <cstring>
+#include <cstdlib>
char token[1024];
char output[1024];
-double mat[4096][4096];
+double **mat;
int main(int argc, char **argv) {
std::ofstream fout;
fout.open(argv[1]);
@@ -18,6 +19,9 @@ int main(int argc, char **argv) {
scanf("%d %d", &ncol, &nrow);
scanf("%s %d %d", token, &ncol, &nrow);
printf("%d %d\n", nrow, ncol);
+ mat = (double **)malloc(nrow * sizeof(double *));
+ for (i = 0; i < nrow; i++)
+ mat[i] = (double *)malloc(ncol * sizeof(double));
for (j = 0; j < ncol; j++)
for (i = 0; i < nrow; i++)
scanf("%lf", mat[i] + j);
@@ -34,7 +38,9 @@ int main(int argc, char **argv) {
for (j = 0; j < ncol; j++)
fout << mat[i][j] << " ";
fout << std::endl;
+ free(mat[i]);
}
+ free(mat);
long length = fout.tellp() - base;
fout.seekp(base);
sprintf(output, "[%13lu]\n", length);
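
The tnet_to_nerv.cpp change above replaces the fixed double mat[4096][4096] buffer (128 MiB of static storage, and a hard 4096 cap on either dimension) with per-row malloc'd buffers sized from the parsed header, together with the matching <cstdlib> include and free() calls. Not part of the commit, but for comparison, a sketch of the same idea with a single contiguous std::vector: it sizes itself from nrow/ncol, needs no manual free, and keeps the column-by-column read order of the original loop. The function name read_matrix is an assumption.

    // Hypothetical alternative to the per-row malloc/free pattern in the diff above:
    // one contiguous allocation owns the whole matrix and is released automatically.
    #include <cstdio>
    #include <vector>

    // Reads an nrow x ncol matrix whose values arrive column by column on stdin
    // (the same element order scanned in tnet_to_nerv.cpp) into row-major storage.
    static std::vector<double> read_matrix(int nrow, int ncol) {
        std::vector<double> mat((std::size_t)nrow * ncol);
        for (int j = 0; j < ncol; j++)
            for (int i = 0; i < nrow; i++)
                std::scanf("%lf", &mat[(std::size_t)i * ncol + j]);
        return mat;
    }
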