author     txh18 <[email protected]>  2015-10-23 19:36:31 +0800
committer  txh18 <[email protected]>  2015-10-23 19:36:31 +0800
commit     1234c026869ab052e898cc2541143fe4a22312b6 (patch)
tree       bd4b980ae12340b4ea3a8aa6259d43dc891b5568 /embedding_example
parent     f0937ae6e6401f25f15bb0e83e764ca888e81f11 (diff)
parent     64fce92b7845b716f3c168036691c37b2467d99b (diff)
Just came back, let's merge the new master
Merge branch 'master' into txh18/rnnlm
Diffstat (limited to 'embedding_example')
-rw-r--r--  embedding_example/.gitignore      |  2
-rw-r--r--  embedding_example/Makefile        | 20
-rw-r--r--  embedding_example/main.c          | 93
-rwxr-xr-x  embedding_example/run.sh          |  4
-rw-r--r--  embedding_example/setup_nerv.lua  | 25
5 files changed, 144 insertions(+), 0 deletions(-)
diff --git a/embedding_example/.gitignore b/embedding_example/.gitignore
new file mode 100644
index 0000000..8e68213
--- /dev/null
+++ b/embedding_example/.gitignore
@@ -0,0 +1,2 @@
+main
+main.o
diff --git a/embedding_example/Makefile b/embedding_example/Makefile
new file mode 100644
index 0000000..73287f4
--- /dev/null
+++ b/embedding_example/Makefile
@@ -0,0 +1,20 @@
+CFLAG += -I ../install/include/luajit-2.0/ -I ../install/include/nerv/
+LDFLAG += -L../install/lib/ -lluajit-5.1 -Wl,-rpath=../install/lib/ -lluaT -lnervcore
+GCC := gcc
+
+.PHONY: FORCE
+
+all: main FORCE
+clean:
+ -rm -f *.o
+ -rm main
+
+FORCE: ../install/bin/luarocks
+ echo "#!/bin/bash" > run.sh
+ $< path >> run.sh
+ echo "./main" >> run.sh
+main: main.o
+ $(GCC) -o $@ $< $(LDFLAG)
+
+main.o: main.c
+ $(GCC) $(CFLAG) -o $@ $< -c
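
The FORCE rule above regenerates run.sh on every build by capturing the output of ../install/bin/luarocks path, which emits the export LUA_PATH=... / export LUA_CPATH=... statements seen in the committed run.sh further down. An alternative is to let the host program extend Lua's search path itself before loading setup_nerv.lua; a minimal sketch, assuming the NERV tree lives under ../install (the path literals and the function name extend_search_path are illustrative, not part of this commit):

    #include <stdio.h>
    #include "lua.h"
    #include "lauxlib.h"

    /* Prepend the NERV install tree to Lua's module search path so that
     * require 'nerv' resolves without a wrapper script. */
    static void extend_search_path(lua_State *L)
    {
        if (luaL_dostring(L,
                "package.path = '../install/share/lua/5.1/?.lua;"
                "../install/share/lua/5.1/?/init.lua;' .. package.path\n"
                "package.cpath = '../install/lib/lua/5.1/?.so;' .. package.cpath"))
            fprintf(stderr, "path setup: %s\n", lua_tostring(L, -1));
    }
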
diff --git a/embedding_example/main.c b/embedding_example/main.c
new file mode 100644
index 0000000..8856d58
--- /dev/null
+++ b/embedding_example/main.c
@@ -0,0 +1,93 @@
+#include "lua.h"
+#include "lauxlib.h"
+#include "lualib.h"
+#include "matrix/matrix.h"
+#include "common.h"
+#include "luaT/luaT.h"
+#include <stdio.h>
+
+const char *nerv_matrix_host_float_tname = "nerv.MMatrixFloat";
+const char *input_name = "_nerv_embed_input";
+const char *output_name = "_nerv_embed_output";
+extern Matrix *nerv_matrix_host_float_create(long nrow, long ncol, Status *status);
+extern void nerv_matrix_host_float_data_retain(Matrix *self);
+extern void nerv_matrix_host_float_data_free(Matrix *self, Status *status);
+
+lua_State *L;
+Matrix *input, *output;
+Status status;
+
+void setup_nerv() {
+ L = lua_open();
+ luaL_openlibs(L);
+ luaL_loadfile(L, "setup_nerv.lua");
+ /* network configuration */
+ lua_pushstring(L, "../nerv/examples/swb_baseline.lua");
+ if (lua_pcall(L, 1, LUA_MULTRET, 0))
+ {
+ printf("%s\n", luaL_checkstring(L, 1));
+ exit(1);
+ }
+ /* lua stack now: input width, output width, propagator */
+ input = nerv_matrix_host_float_create(1, luaL_checkinteger(L, 1), &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ output = nerv_matrix_host_float_create(1, luaL_checkinteger(L, 2), &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ /* add reference to avoid gc */
+ luaT_pushudata(L, output, nerv_matrix_host_float_tname);
+ luaT_pushudata(L, input, nerv_matrix_host_float_tname);
+ lua_setfield(L, LUA_GLOBALSINDEX, input_name);
+ lua_setfield(L, LUA_GLOBALSINDEX, output_name);
+}
+
+
+void propagate(float for_fun) {
+ int i, j;
+ printf("ok: %d\n", lua_gettop(L));
+ lua_pushvalue(L, 3);
+ /* lua stack now: input width, output width, propagator, propagator */
+ for (i = 0; i < input->nrow; i++) /* nrow is actually 1 */
+ {
+ float *nerv_row = (float *)((char *)input->data.f + i * input->stride);
+ for (j = 0; j < input->ncol; j++)
+ {
+ nerv_row[j] = j * for_fun;
+ }
+ }
+ lua_getfield(L, LUA_GLOBALSINDEX, input_name);
+ lua_getfield(L, LUA_GLOBALSINDEX, output_name);
+ /* lua stack now: input width, output width, propagator, propagator, input, output */
+ if (lua_pcall(L, 2, 0, 0)) /* call propagator with two parameters */
+ {
+ printf("%s\n", luaL_checkstring(L, -1));
+ exit(-1);
+ }
+ /* lua stack now: input width, output width, propagator */
+ printf("## output: %ld %ld ##\n", output->nrow, output->ncol);
+ for (i = 0; i < output->nrow; i++) /* nrow is actually 1 */
+ {
+ float *nerv_row = (float *)((char *)output->data.f + i * output->stride);
+ for (j = 0; j < output->ncol; j++)
+ {
+ printf("%.8f ", nerv_row[j]);
+ }
+ }
+}
+
+void teardown_nerv() {
+ lua_pushnil(L);
+ lua_pushnil(L);
+ lua_setfield(L, LUA_GLOBALSINDEX, input_name);
+ lua_setfield(L, LUA_GLOBALSINDEX, output_name);
+ lua_gc(L, LUA_GCCOLLECT, 0);
+}
+
+int main() {
+ setup_nerv();
+ propagate(1.0);
+ propagate(2.0);
+ propagate(2.0);
+ propagate(3.0);
+ teardown_nerv();
+ return 0;
+}
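
One fragility in main.c: the return value of luaL_loadfile() is never checked, so if setup_nerv.lua is missing or fails to compile, the following lua_pcall() would attempt to call the error message that loadfile left on the stack. A sketch of the load step with the check added (load_setup_script is an illustrative name, not part of this commit):

    #include <stdio.h>
    #include <stdlib.h>
    #include "lua.h"
    #include "lauxlib.h"

    static void load_setup_script(lua_State *L, const char *script,
                                  const char *config)
    {
        if (luaL_loadfile(L, script)) {        /* missing file or compile error */
            fprintf(stderr, "load: %s\n", lua_tostring(L, -1));
            exit(1);
        }
        lua_pushstring(L, config);             /* network configuration file */
        if (lua_pcall(L, 1, LUA_MULTRET, 0)) { /* run, keeping all return values */
            fprintf(stderr, "run: %s\n", lua_tostring(L, -1));
            exit(1);
        }
    }
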
diff --git a/embedding_example/run.sh b/embedding_example/run.sh
new file mode 100755
index 0000000..e919263
--- /dev/null
+++ b/embedding_example/run.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+export LUA_PATH='/home/slhome/mfy43/.luarocks/share/lua/5.1/?.lua;/home/slhome/mfy43/.luarocks/share/lua/5.1/?/init.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?/init.lua;./?.lua;/usr/local/share/luajit-2.0.4/?.lua;/usr/local/share/lua/5.1/?.lua;/usr/local/share/lua/5.1/?/init.lua'
+export LUA_CPATH='/home/slhome/mfy43/.luarocks/lib/lua/5.1/?.so;/home/slhome/mfy43/nerv/install/lib/lua/5.1/?.so;./?.so;/usr/local/lib/lua/5.1/?.so;/usr/local/lib/lua/5.1/loadall.so'
+./main
diff --git a/embedding_example/setup_nerv.lua b/embedding_example/setup_nerv.lua
new file mode 100644
index 0000000..d80c306
--- /dev/null
+++ b/embedding_example/setup_nerv.lua
@@ -0,0 +1,25 @@
+local k,l,_=pcall(require,"luarocks.loader") _=k and l.add_context("nerv","scm-1")
+require 'nerv'
+local arg = {...}
+dofile(arg[1])
+local param_repo = nerv.ParamRepo()
+param_repo:import(gconf.initialized_param, nil, gconf)
+local layer_repo = make_layer_repo(param_repo)
+local network = get_decode_network(layer_repo)
+local global_transf = get_global_transf(layer_repo)
+local batch_size = 1
+network:init(batch_size)
+
+function propagator(input, output)
+ local transformed = nerv.speech_utils.global_transf(
+ gconf.cumat_type.new_from_host(input),
+ global_transf, 0, 0, gconf) -- preprocessing
+ local gpu_input = transformed
+ local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol())
+ network:propagate({gpu_input}, {gpu_output})
+ gpu_output:copy_toh(output)
+ -- collect garbage in-time to save GPU memory
+ collectgarbage("collect")
+end
+
+return network.dim_in[1], network.dim_out[1], propagator
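
setup_nerv.lua hands three values back to the C host: the network's input width, its output width, and the propagator function. After the lua_pcall() in setup_nerv() they occupy stack slots 1 to 3, which is why main.c reads the widths with luaL_checkinteger(L, 1) and luaL_checkinteger(L, 2) and duplicates slot 3 with lua_pushvalue(L, 3) before each call (lua_pcall pops the function it invokes). A minimal sketch of that read, assuming the same stack layout (read_returns is an illustrative name):

    #include "lua.h"
    #include "lauxlib.h"

    /* Read the two widths returned by setup_nerv.lua and verify that the
     * propagator sits at slot 3 for later lua_pushvalue(L, 3) calls. */
    static void read_returns(lua_State *L, int *in_width, int *out_width)
    {
        *in_width  = (int)luaL_checkinteger(L, 1);
        *out_width = (int)luaL_checkinteger(L, 2);
        luaL_checktype(L, 3, LUA_TFUNCTION); /* propagator stays resident */
    }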