-rw-r--r--  embedding_example/.gitignore                |   2 +
-rw-r--r--  embedding_example/Makefile                  |  10 +
-rw-r--r--  embedding_example/main.c                    | 103 ++
-rw-r--r--  embedding_example/setup_nerv.lua            |  29 +
-rw-r--r--  embedding_example/swb_baseline_decode.lua   | 112 ++
-rw-r--r--  nerv/Makefile                               |   2 +-
-rw-r--r--  nerv/layer/init.lua                         |   1 +
-rw-r--r--  nerv/lib/matrix/generic/matrix.c            |   4 +-
-rw-r--r--  nerv/lib/matrix/generic/matrix.h            |   3 +
9 files changed, 263 insertions(+), 3 deletions(-)
diff --git a/embedding_example/.gitignore b/embedding_example/.gitignore
new file mode 100644
index 0000000..8e68213
--- /dev/null
+++ b/embedding_example/.gitignore
@@ -0,0 +1,2 @@
+main
+main.o
diff --git a/embedding_example/Makefile b/embedding_example/Makefile
new file mode 100644
index 0000000..e4ee314
--- /dev/null
+++ b/embedding_example/Makefile
@@ -0,0 +1,10 @@
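+# NOTE: LuaJIT and NERV are assumed to be installed under ../install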
+CFLAG += -I ../install/include/luajit-2.0/ -I ../install/include/nerv/
+LDFLAG += -L../install/lib/ -lluajit-5.1 -Wl,-rpath=../install/lib/ -lluaT -lnervcore
+GCC := gcc
+
+main: main.o
+	$(GCC) -o $@ $< $(LDFLAG)
+
+main.o: main.c
+	$(GCC) $(CFLAG) -o $@ $< -c
diff --git a/embedding_example/main.c b/embedding_example/main.c
new file mode 100644
index 0000000..4e70892
--- /dev/null
+++ b/embedding_example/main.c
@@ -0,0 +1,103 @@
+#include "lua.h"
+#include "lauxlib.h"
+#include "lualib.h"
+#include "matrix/matrix.h"
+#include "common.h"
+#include "luaT/luaT.h"
+#include <stdio.h>
+#include <stdlib.h> /* for exit() */
+
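+/* host (MMatrix) constructor and refcount helpers exported by libnervcore;
+   this commit makes data_free/data_retain non-static (see matrix.h below) */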
+const char *nerv_matrix_host_float_tname = "nerv.MMatrixFloat";
+extern Matrix *nerv_matrix_host_float_create(long nrow, long ncol, Status *status);
+extern void nerv_matrix_host_float_data_retain(Matrix *self);
+extern void nerv_matrix_host_float_data_free(Matrix *self, Status *status);
+
+lua_State *L;
+Matrix *input, *output;
+Status status;
+
+void setup_nerv() {
+    L = lua_open();
+    luaL_openlibs(L);
+    if (luaL_loadfile(L, "setup_nerv.lua"))
+    {
+        printf("%s\n", luaL_checkstring(L, -1));
+        exit(1);
+    }
+    /* pass the network configuration script as the argument */
+    lua_pushstring(L, "swb_baseline_decode.lua");
+    if (lua_pcall(L, 1, LUA_MULTRET, 0))
+    {
+        printf("%s\n", luaL_checkstring(L, -1));
+        exit(1);
+    }
+    /* lua stack now: input width, output width, propagator */
+    input = nerv_matrix_host_float_create(1, luaL_checkinteger(L, 1), &status);
+    NERV_LUA_CHECK_STATUS(L, status);
+    output = nerv_matrix_host_float_create(1, luaL_checkinteger(L, 2), &status);
+    NERV_LUA_CHECK_STATUS(L, status);
+}
+
+
+void propagate(float for_fun) {
+    int i, j;
+    printf("ok: %d\n", lua_gettop(L));
+    lua_pushvalue(L, 3);
+    /* lua stack now: input width, output width, propagator, propagator */
+    for (i = 0; i < input->nrow; i++) /* nrow is actually 1 */
+    {
+        float *nerv_row = (float *)((char *)input->data.f + i * input->stride);
+        for (j = 0; j < input->ncol; j++)
+        {
+            nerv_row[j] = j * for_fun;
+        }
+    }
+    /* retain the data buffers so that collecting the userdata pushed
+       below will not free them while we still hold these pointers */
+    nerv_matrix_host_float_data_retain(input);
+    nerv_matrix_host_float_data_retain(input);
+    nerv_matrix_host_float_data_retain(input);
+    nerv_matrix_host_float_data_retain(input);
+    nerv_matrix_host_float_data_retain(output);
+    nerv_matrix_host_float_data_retain(output);
+    nerv_matrix_host_float_data_retain(output);
+    nerv_matrix_host_float_data_retain(output);
+
+    luaT_pushudata(L, input, nerv_matrix_host_float_tname);
+    luaT_pushudata(L, output, nerv_matrix_host_float_tname);
+    /* lua stack now: input width, output width, propagator, propagator, input, output */
+    if (lua_pcall(L, 2, 0, 0)) /* call propagator with two parameters */
+    {
+        printf("%s\n", luaL_checkstring(L, -1));
+        exit(1);
+    }
+    /* lua stack now: input width, output width, propagator */
+    printf("## caller ##\n");
+    for (i = 0; i < output->nrow; i++) /* nrow is actually 1 */
+    {
+        float *nerv_row = (float *)((char *)output->data.f + i * output->stride);
+        for (j = 0; j < output->ncol; j++)
+        {
+            printf("%.8f ", nerv_row[j]);
+        }
+        printf("\n");
+    }
+}
+
+void teardown_nerv() {
+    nerv_matrix_host_float_data_free(input, &status);
+    NERV_LUA_CHECK_STATUS(L, status);
+    nerv_matrix_host_float_data_free(output, &status);
+    NERV_LUA_CHECK_STATUS(L, status);
+}
+
+int main() {
+    setup_nerv();
+    propagate(1.0);
+    propagate(2.0);
+    propagate(3.0);
+    teardown_nerv();
+    return 0;
+}
diff --git a/embedding_example/setup_nerv.lua b/embedding_example/setup_nerv.lua
new file mode 100644
index 0000000..e33a1e7
--- /dev/null
+++ b/embedding_example/setup_nerv.lua
@@ -0,0 +1,29 @@
+package.path="/home/slhome/mfy43/.luarocks/share/lua/5.1/?.lua;/home/slhome/mfy43/.luarocks/share/lua/5.1/?/init.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?/init.lua;"..package.path
+package.cpath="/home/slhome/mfy43/.luarocks/lib/lua/5.1/?.so;/home/slhome/mfy43/nerv/install/lib/lua/5.1/?.so;"..package.cpath
+local k,l,_=pcall(require,"luarocks.loader") _=k and l.add_context("nerv","scm-1")
+
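+-- this script is loaded by the embedding C program (main.c): it receives
+-- the network configuration script as its first argument and returns the
+-- input width, the output width and a propagation closure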
+local args = {...}
+require 'nerv'
+dofile(args[1])
+local param_repo = nerv.ParamRepo()
+param_repo:import(gconf.initialized_param, nil, gconf)
+local sublayer_repo = make_sublayer_repo(param_repo)
+local layer_repo = make_layer_repo(sublayer_repo, param_repo)
+local network = get_network(layer_repo)
+local batch_size = 1
+network:init(batch_size)
+function propagator(input, output)
+    local gpu_input = nerv.CuMatrixFloat(input:nrow(), input:ncol())
+    local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol())
+    gpu_input:copy_fromh(input)
+    print(gpu_input)
+    network:propagate({gpu_input}, {gpu_output})
+    gpu_output:copy_toh(output)
+    print(output)
+    -- collect garbage promptly to reclaim GPU memory
+    collectgarbage("collect")
+end
+return network.dim_in[1], network.dim_out[1], propagator
diff --git a/embedding_example/swb_baseline_decode.lua b/embedding_example/swb_baseline_decode.lua
new file mode 100644
index 0000000..14a463b
--- /dev/null
+++ b/embedding_example/swb_baseline_decode.lua
@@ -0,0 +1,112 @@
+require 'htk_io'
+gconf = {lrate = 0.8, wcost = 1e-6, momentum = 0.9,
+         cumat_type = nerv.CuMatrixFloat,
+         mmat_type = nerv.MMatrixFloat,
+         frm_ext = 5,
+         tr_scp = "/slfs1/users/mfy43/swb_ivec/train_bp.scp",
+         cv_scp = "/slfs1/users/mfy43/swb_ivec/train_cv.scp",
+         htk_conf = "/slfs1/users/mfy43/swb_ivec/plp_0_d_a.conf",
+         initialized_param = {"/slfs1/users/mfy43/swb_init.nerv",
+                              "/slfs1/users/mfy43/swb_global_transf.nerv"},
+         debug = false}
+
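+-- network shape: 429-dim input (frm_ext = 5 gives 11 spliced frames of
+-- 39-dim features, 11 x 39 = 429), seven sigmoid hidden layers of width
+-- 2048, and a 3001-dim softmax output layer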
+function make_sublayer_repo(param_repo)
+    return nerv.LayerRepo(
+    {
+        -- global transf
+        ["nerv.BiasLayer"] =
+        {
+            blayer1 = {{bias = "bias1"}, {dim_in = {429}, dim_out = {429}}},
+            blayer2 = {{bias = "bias2"}, {dim_in = {429}, dim_out = {429}}}
+        },
+        ["nerv.WindowLayer"] =
+        {
+            wlayer1 = {{window = "window1"}, {dim_in = {429}, dim_out = {429}}},
+            wlayer2 = {{window = "window2"}, {dim_in = {429}, dim_out = {429}}}
+        },
+        -- biased linearity
+        ["nerv.AffineLayer"] =
+        {
+            affine0 = {{ltp = "affine0_ltp", bp = "affine0_bp"},
+                       {dim_in = {429}, dim_out = {2048}}},
+            affine1 = {{ltp = "affine1_ltp", bp = "affine1_bp"},
+                       {dim_in = {2048}, dim_out = {2048}}},
+            affine2 = {{ltp = "affine2_ltp", bp = "affine2_bp"},
+                       {dim_in = {2048}, dim_out = {2048}}},
+            affine3 = {{ltp = "affine3_ltp", bp = "affine3_bp"},
+                       {dim_in = {2048}, dim_out = {2048}}},
+            affine4 = {{ltp = "affine4_ltp", bp = "affine4_bp"},
+                       {dim_in = {2048}, dim_out = {2048}}},
+            affine5 = {{ltp = "affine5_ltp", bp = "affine5_bp"},
+                       {dim_in = {2048}, dim_out = {2048}}},
+            affine6 = {{ltp = "affine6_ltp", bp = "affine6_bp"},
+                       {dim_in = {2048}, dim_out = {2048}}},
+            affine7 = {{ltp = "affine7_ltp", bp = "affine7_bp"},
+                       {dim_in = {2048}, dim_out = {3001}}}
+        },
+        ["nerv.SigmoidLayer"] =
+        {
+            sigmoid0 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+            sigmoid1 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+            sigmoid2 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+            sigmoid3 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+            sigmoid4 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+            sigmoid5 = {{}, {dim_in = {2048}, dim_out = {2048}}},
+            sigmoid6 = {{}, {dim_in = {2048}, dim_out = {2048}}}
+        },
+        ["nerv.SoftmaxLayer"] =
+        {
+            soutput = {{}, {dim_in = {3001}, dim_out = {3001}}}
+        }
+    }, param_repo, gconf)
+end
+
+function make_layer_repo(sublayer_repo, param_repo)
+    return nerv.LayerRepo(
+    {
+        ["nerv.DAGLayer"] =
+        {
+            global_transf = {{}, {
+                dim_in = {429}, dim_out = {429},
+                sub_layers = sublayer_repo,
+                connections = {
+                    ["<input>[1]"] = "blayer1[1]",
+                    ["blayer1[1]"] = "wlayer1[1]",
+                    ["wlayer1[1]"] = "blayer2[1]",
+                    ["blayer2[1]"] = "wlayer2[1]",
+                    ["wlayer2[1]"] = "<output>[1]"
+                }
+            }},
+            main = {{}, {
+                dim_in = {429}, dim_out = {3001},
+                sub_layers = sublayer_repo,
+                connections = {
+                    ["<input>[1]"] = "affine0[1]",
+                    ["affine0[1]"] = "sigmoid0[1]",
+                    ["sigmoid0[1]"] = "affine1[1]",
+                    ["affine1[1]"] = "sigmoid1[1]",
+                    ["sigmoid1[1]"] = "affine2[1]",
+                    ["affine2[1]"] = "sigmoid2[1]",
+                    ["sigmoid2[1]"] = "affine3[1]",
+                    ["affine3[1]"] = "sigmoid3[1]",
+                    ["sigmoid3[1]"] = "affine4[1]",
+                    ["affine4[1]"] = "sigmoid4[1]",
+                    ["sigmoid4[1]"] = "affine5[1]",
+                    ["affine5[1]"] = "sigmoid5[1]",
+                    ["sigmoid5[1]"] = "affine6[1]",
+                    ["affine6[1]"] = "sigmoid6[1]",
+                    ["sigmoid6[1]"] = "affine7[1]",
+                    ["affine7[1]"] = "soutput[1]",
+                    ["soutput[1]"] = "<output>[1]"
+                }
+            }}
+        }
+    }, param_repo, gconf)
+end
+
+function get_network(layer_repo)
+    return layer_repo:get_layer("main")
+end
diff --git a/nerv/Makefile b/nerv/Makefile
index 022e2fb..fdffd12 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -30,7 +30,7 @@ LUAT_OBJS := $(addprefix $(OBJ_DIR)/,$(LUAT_OBJS))
 OBJS := $(CORE_OBJS) $(NERV_OBJS) $(LUAT_OBJS)
 LIBS := $(INST_LIBDIR)/libnerv.so $(LIB_PATH)/libnervcore.so $(LIB_PATH)/libluaT.so
 LUA_LIBS := matrix/init.lua io/init.lua init.lua \
-    layer/init.lua layer/affine.lua layer/sigmoid.lua layer/softmax_ce.lua \
+    layer/init.lua layer/affine.lua layer/sigmoid.lua layer/softmax_ce.lua layer/softmax.lua \
     layer/window.lua layer/bias.lua layer/combiner.lua layer/mse.lua layer/affine_recurrent.lua \
     nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/layer_dag.lua \
     io/sgd_buffer.lua
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 7172f99..6861b0e 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -78,3 +78,4 @@ nerv.include('window.lua')
 nerv.include('mse.lua')
 nerv.include('combiner.lua')
 nerv.include('affine_recurrent.lua')
+nerv.include('softmax.lua')
diff --git a/nerv/lib/matrix/generic/matrix.c b/nerv/lib/matrix/generic/matrix.c
index e4afa37..6cb3dc0 100644
--- a/nerv/lib/matrix/generic/matrix.c
+++ b/nerv/lib/matrix/generic/matrix.c
@@ -3,7 +3,7 @@
#include "matrix.h"
/* FIXME: malloc failure detection */
-static void nerv_matrix_(data_free)(Matrix *self, Status *status) {
+void nerv_matrix_(data_free)(Matrix *self, Status *status) {
assert(*self->data_ref > 0);
if (--(*self->data_ref) == 0)
{
@@ -18,7 +18,7 @@ static void nerv_matrix_(data_free)(Matrix *self, Status *status) {
     }
 }
 
-static void nerv_matrix_(data_retain)(Matrix *self) {
+void nerv_matrix_(data_retain)(Matrix *self) {
     (*self->data_ref)++;
 }
diff --git a/nerv/lib/matrix/generic/matrix.h b/nerv/lib/matrix/generic/matrix.h
index 9d44e6d..69b4e6d 100644
--- a/nerv/lib/matrix/generic/matrix.h
+++ b/nerv/lib/matrix/generic/matrix.h
@@ -2,3 +2,6 @@
 Matrix *nerv_matrix_(create)(long nrow, long ncol, Status *status);
 void nerv_matrix_(destroy)(Matrix *self, Status *status);
 Matrix *nerv_matrix_(getrow)(Matrix *self, int row);
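+/* formerly static in matrix.c; exposed so embedding programs (see embedding_example/main.c) can manage data lifetimes */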
+void nerv_matrix_(data_free)(Matrix *self, Status *status);
+void nerv_matrix_(data_retain)(Matrix *self);