From c3effaac9e9965371a73f9c84c2a4e0880f32138 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Tue, 4 Aug 2015 17:27:09 +0800
Subject: fix gc issues

---
 embedding_example/Makefile       |  6 ++++++
 embedding_example/main.c         | 28 ++++++++++++++++------------
 embedding_example/run.sh         |  4 ++++
 embedding_example/setup_nerv.lua |  9 ++++-----
 4 files changed, 30 insertions(+), 17 deletions(-)
 create mode 100755 embedding_example/run.sh

(limited to 'embedding_example')

diff --git a/embedding_example/Makefile b/embedding_example/Makefile
index e4ee314..3420b30 100644
--- a/embedding_example/Makefile
+++ b/embedding_example/Makefile
@@ -2,6 +2,12 @@ CFLAG += -I ../install/include/luajit-2.0/ -I ../install/include/nerv/
 LDFLAG += -L../install/lib/ -lluajit-5.1 -Wl,-rpath=../install/lib/ -lluaT -lnervcore
 GCC := gcc
 
+.PHONY: FORCE
+
+FORCE: ../install/bin/luarocks
+	echo "#!/bin/bash" > run.sh
+	$< path >> run.sh
+	echo "./main" >> run.sh
 main: main.o
 	$(GCC) -o $@ $< $(LDFLAG)
 
diff --git a/embedding_example/main.c b/embedding_example/main.c
index 4c6459c..b3c9bf2 100644
--- a/embedding_example/main.c
+++ b/embedding_example/main.c
@@ -7,6 +7,8 @@
 #include
 
 const char *nerv_matrix_host_float_tname = "nerv.MMatrixFloat";
+const char *input_name = "_nerv_embed_input";
+const char *output_name = "_nerv_embed_output";
 extern Matrix *nerv_matrix_host_float_create(long nrow, long ncol, Status *status);
 extern void nerv_matrix_host_float_data_retain(Matrix *self);
 extern void nerv_matrix_host_float_data_free(Matrix *self, Status *status);
@@ -31,6 +33,11 @@ void setup_nerv() {
     NERV_LUA_CHECK_STATUS(L, status);
     output = nerv_matrix_host_float_create(1, luaL_checkinteger(L, 2), &status);
     NERV_LUA_CHECK_STATUS(L, status);
+    /* add reference to avoid gc */
+    luaT_pushudata(L, output, nerv_matrix_host_float_tname);
+    luaT_pushudata(L, input, nerv_matrix_host_float_tname);
+    lua_setfield(L, LUA_GLOBALSINDEX, input_name);
+    lua_setfield(L, LUA_GLOBALSINDEX, output_name);
 }
 
@@ -47,12 +54,8 @@ void propagate(float for_fun) {
             nerv_row[j] = j * for_fun;
         }
     }
-    /* avoid gc */
-    nerv_matrix_host_float_data_retain(input);
-    nerv_matrix_host_float_data_retain(output);
-
-    luaT_pushudata(L, input, nerv_matrix_host_float_tname);
-    luaT_pushudata(L, output, nerv_matrix_host_float_tname);
+    lua_getfield(L, LUA_GLOBALSINDEX, input_name);
+    lua_getfield(L, LUA_GLOBALSINDEX, output_name);
     /* lua stack now: input width, output width, propagator, propagator, input, output */
     if (lua_pcall(L, 2, 0, 0)) /* call propagator with two parameters */
     {
@@ -60,7 +63,7 @@
         exit(-1);
     }
     /* lua stack now: input width, output width, propagator */
-    printf("## caller ##\n");
+    printf("## output: %ld %ld ##\n", output->nrow, output->ncol);
     for (i = 0; i < output->nrow; i++) /* nrow is actually 1 */
     {
         float *nerv_row = (float *)((char *)output->data.f + i * output->stride);
@@ -68,21 +71,22 @@
         {
             printf("%.8f ", nerv_row[j]);
         }
-        printf("\n");
     }
 }
 
 void teardown_nerv() {
-    nerv_matrix_host_float_data_free(input, &status);
-    NERV_LUA_CHECK_STATUS(L, status);
-    nerv_matrix_host_float_data_free(output, &status);
-    NERV_LUA_CHECK_STATUS(L, status);
+    lua_pushnil(L);
+    lua_pushnil(L);
+    lua_setfield(L, LUA_GLOBALSINDEX, input_name);
+    lua_setfield(L, LUA_GLOBALSINDEX, output_name);
+    lua_gc(L, LUA_GCCOLLECT, 0);
 }
 
 int main() {
     setup_nerv();
     propagate(1.0);
     propagate(2.0);
+    propagate(2.0);
     propagate(3.0);
     teardown_nerv();
     return 0;
diff --git a/embedding_example/run.sh b/embedding_example/run.sh
new file mode 100755
index 0000000..e919263
--- /dev/null
+++ b/embedding_example/run.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+export LUA_PATH='/home/slhome/mfy43/.luarocks/share/lua/5.1/?.lua;/home/slhome/mfy43/.luarocks/share/lua/5.1/?/init.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?/init.lua;./?.lua;/usr/local/share/luajit-2.0.4/?.lua;/usr/local/share/lua/5.1/?.lua;/usr/local/share/lua/5.1/?/init.lua'
+export LUA_CPATH='/home/slhome/mfy43/.luarocks/lib/lua/5.1/?.so;/home/slhome/mfy43/nerv/install/lib/lua/5.1/?.so;./?.so;/usr/local/lib/lua/5.1/?.so;/usr/local/lib/lua/5.1/loadall.so'
+./main
diff --git a/embedding_example/setup_nerv.lua b/embedding_example/setup_nerv.lua
index e33a1e7..3ae878d 100644
--- a/embedding_example/setup_nerv.lua
+++ b/embedding_example/setup_nerv.lua
@@ -1,10 +1,7 @@
-package.path="/home/slhome/mfy43/.luarocks/share/lua/5.1/?.lua;/home/slhome/mfy43/.luarocks/share/lua/5.1/?/init.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?/init.lua;"..package.path
-package.cpath="/home/slhome/mfy43/.luarocks/lib/lua/5.1/?.so;/home/slhome/mfy43/nerv/install/lib/lua/5.1/?.so;"..package.cpath
 local k,l,_=pcall(require,"luarocks.loader")
 _=k and l.add_context("nerv","scm-1")
-
-local args = {...}
 require 'nerv'
-dofile(args[1])
+local arg = {...}
+dofile(arg[1])
 local param_repo = nerv.ParamRepo()
 param_repo:import(gconf.initialized_param, nil, gconf)
 local sublayer_repo = make_sublayer_repo(param_repo)
@@ -12,6 +9,7 @@ local layer_repo = make_layer_repo(sublayer_repo, param_repo)
 local network = get_network(layer_repo)
 local batch_size = 1
 network:init(batch_size)
+
 function propagator(input, output)
     local gpu_input = nerv.CuMatrixFloat(input:nrow(), input:ncol())
     local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol())
@@ -23,4 +21,5 @@ function propagator(input, output)
     -- collect garbage in-time to save GPU memory
     collectgarbage("collect")
 end
+
 return network.dim_in[1], network.dim_out[1], propagator
-- 
cgit v1.2.3-70-g09d2
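
Note on the pattern used by this commit: instead of manually retaining and freeing the matrices, setup_nerv() now anchors the C-created userdata in the Lua globals _nerv_embed_input/_nerv_embed_output so the collector cannot reclaim them while the C side still holds raw pointers; propagate() fetches them back with lua_getfield, and teardown_nerv() clears the globals and forces a full collection. The sketch below is a minimal, NERV-independent illustration of that anchoring pattern against the stock Lua 5.1/LuaJIT C API only; the Box struct, the _sketch_box global name, the file name, and the build command are assumptions for illustration, and lua_setglobal/lua_getglobal are the Lua 5.1 shorthands for the lua_setfield/lua_getfield(L, LUA_GLOBALSINDEX, ...) calls used in the patch.

/* gc_anchor_sketch.c -- hypothetical standalone example, not part of NERV.
 * Build (paths are assumptions): gcc gc_anchor_sketch.c \
 *   -I../install/include/luajit-2.0 -L../install/lib -lluajit-5.1 */
#include <stdio.h>
#include "lua.h"
#include "lauxlib.h"
#include "lualib.h"

typedef struct { int payload; } Box;            /* illustrative userdata body */
static const char *box_anchor = "_sketch_box";  /* hypothetical global name */

int main(void) {
    lua_State *L = luaL_newstate();
    luaL_openlibs(L);

    /* Create a full userdata; it becomes collectable as soon as no Lua
     * reference to it remains. */
    Box *box = (Box *)lua_newuserdata(L, sizeof(Box));
    box->payload = 42;

    /* Anchor it: storing the value in a global keeps it reachable, so the
     * GC will not reclaim it while C still uses the raw pointer. */
    lua_setglobal(L, box_anchor);               /* pops the userdata */

    lua_gc(L, LUA_GCCOLLECT, 0);                /* safe: the anchor keeps it alive */
    printf("payload after collection: %d\n", box->payload);

    /* When the value is needed on the stack again (e.g. as a lua_pcall
     * argument), read it back from the global. */
    lua_getglobal(L, box_anchor);
    lua_pop(L, 1);

    /* Teardown: drop the anchor and let the collector reclaim the userdata;
     * the raw pointer must not be dereferenced after this point. */
    lua_pushnil(L);
    lua_setglobal(L, box_anchor);
    lua_gc(L, LUA_GCCOLLECT, 0);

    lua_close(L);
    return 0;
}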