author     Determinant <ted.sybil@gmail.com>  2015-08-04 17:27:09 +0800
committer  Determinant <ted.sybil@gmail.com>  2015-08-04 17:27:09 +0800
commit     c3effaac9e9965371a73f9c84c2a4e0880f32138 (patch)
tree       e6d68508dbafeb1840d97fc3709acfc3c6b74db3 /embedding_example
parent     e20b60f659b08c46b9da0591ee489803f3f3d300 (diff)
fix gc issues
Diffstat (limited to 'embedding_example')
-rw-r--r--  embedding_example/Makefile          6
-rw-r--r--  embedding_example/main.c           28
-rwxr-xr-x  embedding_example/run.sh            4
-rw-r--r--  embedding_example/setup_nerv.lua    9
4 files changed, 30 insertions(+), 17 deletions(-)
diff --git a/embedding_example/Makefile b/embedding_example/Makefile
index e4ee314..3420b30 100644
--- a/embedding_example/Makefile
+++ b/embedding_example/Makefile
@@ -2,6 +2,12 @@ CFLAG += -I ../install/include/luajit-2.0/ -I ../install/include/nerv/
LDFLAG += -L../install/lib/ -lluajit-5.1 -Wl,-rpath=../install/lib/ -lluaT -lnervcore
GCC := gcc
+.PHONY: FORCE
+
+FORCE: ../install/bin/luarocks
+ echo "#!/bin/bash" > run.sh
+ $< path >> run.sh
+ echo "./main" >> run.sh
main: main.o
$(GCC) -o $@ $< $(LDFLAG)
diff --git a/embedding_example/main.c b/embedding_example/main.c
index 4c6459c..b3c9bf2 100644
--- a/embedding_example/main.c
+++ b/embedding_example/main.c
@@ -7,6 +7,8 @@
#include <stdio.h>
const char *nerv_matrix_host_float_tname = "nerv.MMatrixFloat";
+const char *input_name = "_nerv_embed_input";
+const char *output_name = "_nerv_embed_output";
extern Matrix *nerv_matrix_host_float_create(long nrow, long ncol, Status *status);
extern void nerv_matrix_host_float_data_retain(Matrix *self);
extern void nerv_matrix_host_float_data_free(Matrix *self, Status *status);
@@ -31,6 +33,11 @@ void setup_nerv() {
NERV_LUA_CHECK_STATUS(L, status);
output = nerv_matrix_host_float_create(1, luaL_checkinteger(L, 2), &status);
NERV_LUA_CHECK_STATUS(L, status);
+ /* add reference to avoid gc */
+ luaT_pushudata(L, output, nerv_matrix_host_float_tname);
+ luaT_pushudata(L, input, nerv_matrix_host_float_tname);
+ lua_setfield(L, LUA_GLOBALSINDEX, input_name);
+ lua_setfield(L, LUA_GLOBALSINDEX, output_name);
}
@@ -47,12 +54,8 @@ void propagate(float for_fun) {
nerv_row[j] = j * for_fun;
}
}
- /* avoid gc */
- nerv_matrix_host_float_data_retain(input);
- nerv_matrix_host_float_data_retain(output);
-
- luaT_pushudata(L, input, nerv_matrix_host_float_tname);
- luaT_pushudata(L, output, nerv_matrix_host_float_tname);
+ lua_getfield(L, LUA_GLOBALSINDEX, input_name);
+ lua_getfield(L, LUA_GLOBALSINDEX, output_name);
/* lua stack now: input width, output width, propagator, propagator, input, output */
if (lua_pcall(L, 2, 0, 0)) /* call propagator with two parameters */
{
@@ -60,7 +63,7 @@ void propagate(float for_fun) {
exit(-1);
}
/* lua stack now: input width, output width, propagator */
- printf("## caller ##\n");
+ printf("## output: %ld %ld ##\n", output->nrow, output->ncol);
for (i = 0; i < output->nrow; i++) /* nrow is actually 1 */
{
float *nerv_row = (float *)((char *)output->data.f + i * output->stride);
@@ -68,21 +71,22 @@ void propagate(float for_fun) {
{
printf("%.8f ", nerv_row[j]);
}
- printf("\n");
}
}
void teardown_nerv() {
- nerv_matrix_host_float_data_free(input, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- nerv_matrix_host_float_data_free(output, &status);
- NERV_LUA_CHECK_STATUS(L, status);
+ lua_pushnil(L);
+ lua_pushnil(L);
+ lua_setfield(L, LUA_GLOBALSINDEX, input_name);
+ lua_setfield(L, LUA_GLOBALSINDEX, output_name);
+ lua_gc(L, LUA_GCCOLLECT, 0);
}
int main() {
setup_nerv();
propagate(1.0);
propagate(2.0);
+ propagate(2.0);
propagate(3.0);
teardown_nerv();
return 0;
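
The main.c change above replaces the per-call data_retain()/data_free() bookkeeping with a single anchor: setup_nerv() stores the input and output userdata in the globals _nerv_embed_input and _nerv_embed_output so the collector always sees a live reference, propagate() simply re-fetches them with lua_getfield(), and teardown_nerv() overwrites both globals with nil and forces a collection. The following is a minimal, self-contained sketch of that anchoring pattern against the plain Lua 5.1 C API; the global name "_my_anchor" and the 64-byte userdata are illustrative placeholders, not NERV objects.

#include <lua.h>
#include <lauxlib.h>
#include <lualib.h>

/* pop the value on top of the stack and keep it alive by storing it in a global */
static void anchor(lua_State *L, const char *name) {
    lua_setfield(L, LUA_GLOBALSINDEX, name);
}

/* drop the anchor and let the collector reclaim the value */
static void unanchor(lua_State *L, const char *name) {
    lua_pushnil(L);
    lua_setfield(L, LUA_GLOBALSINDEX, name);
    lua_gc(L, LUA_GCCOLLECT, 0);
}

int main(void) {
    lua_State *L = luaL_newstate();
    luaL_openlibs(L);
    lua_newuserdata(L, 64);        /* a C-side buffer wrapped as userdata */
    anchor(L, "_my_anchor");       /* referenced from _G from now on */
    lua_gc(L, LUA_GCCOLLECT, 0);   /* the anchored userdata survives this collection */
    unanchor(L, "_my_anchor");
    lua_close(L);
    return 0;
}

An equivalent approach is to park the value in the registry with luaL_ref(L, LUA_REGISTRYINDEX) and release it with luaL_unref(), which keeps the anchor out of the global table.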
diff --git a/embedding_example/run.sh b/embedding_example/run.sh
new file mode 100755
index 0000000..e919263
--- /dev/null
+++ b/embedding_example/run.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+export LUA_PATH='/home/slhome/mfy43/.luarocks/share/lua/5.1/?.lua;/home/slhome/mfy43/.luarocks/share/lua/5.1/?/init.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?/init.lua;./?.lua;/usr/local/share/luajit-2.0.4/?.lua;/usr/local/share/lua/5.1/?.lua;/usr/local/share/lua/5.1/?/init.lua'
+export LUA_CPATH='/home/slhome/mfy43/.luarocks/lib/lua/5.1/?.so;/home/slhome/mfy43/nerv/install/lib/lua/5.1/?.so;./?.so;/usr/local/lib/lua/5.1/?.so;/usr/local/lib/lua/5.1/loadall.so'
+./main
diff --git a/embedding_example/setup_nerv.lua b/embedding_example/setup_nerv.lua
index e33a1e7..3ae878d 100644
--- a/embedding_example/setup_nerv.lua
+++ b/embedding_example/setup_nerv.lua
@@ -1,10 +1,7 @@
-package.path="/home/slhome/mfy43/.luarocks/share/lua/5.1/?.lua;/home/slhome/mfy43/.luarocks/share/lua/5.1/?/init.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?/init.lua;"..package.path
-package.cpath="/home/slhome/mfy43/.luarocks/lib/lua/5.1/?.so;/home/slhome/mfy43/nerv/install/lib/lua/5.1/?.so;"..package.cpath
local k,l,_=pcall(require,"luarocks.loader") _=k and l.add_context("nerv","scm-1")
-
-local args = {...}
require 'nerv'
-dofile(args[1])
+local arg = {...}
+dofile(arg[1])
local param_repo = nerv.ParamRepo()
param_repo:import(gconf.initialized_param, nil, gconf)
local sublayer_repo = make_sublayer_repo(param_repo)
@@ -12,6 +9,7 @@ local layer_repo = make_layer_repo(sublayer_repo, param_repo)
local network = get_network(layer_repo)
local batch_size = 1
network:init(batch_size)
+
function propagator(input, output)
local gpu_input = nerv.CuMatrixFloat(input:nrow(), input:ncol())
local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol())
@@ -23,4 +21,5 @@ function propagator(input, output)
-- collect garbage in-time to save GPU memory
collectgarbage("collect")
end
+
return network.dim_in[1], network.dim_out[1], propagator
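
setup_nerv.lua now takes the config file path through the chunk's vararg (arg[1] above) and returns the input width, the output width, and the propagator closure, which setup_nerv() in main.c reads off the stack. Below is a hedged sketch of that consumption step, assuming Lua 5.1 and an initially empty stack; the config path "myconfig.lua" and the simplified error handling are placeholders and do not reproduce the real main.c.

#include <lua.h>
#include <lauxlib.h>
#include <lualib.h>
#include <stdio.h>

int main(void) {
    lua_State *L = luaL_newstate();
    luaL_openlibs(L);
    if (luaL_loadfile(L, "setup_nerv.lua")) {    /* compile the chunk */
        fprintf(stderr, "%s\n", lua_tostring(L, -1));
        return 1;
    }
    lua_pushstring(L, "myconfig.lua");           /* becomes arg[1] inside the chunk */
    if (lua_pcall(L, 1, 3, 0)) {                 /* run it, expecting three results */
        fprintf(stderr, "%s\n", lua_tostring(L, -1));
        return 1;
    }
    /* stack, bottom to top: input width, output width, propagator */
    int in_dim  = (int) luaL_checkinteger(L, 1);
    int out_dim = (int) luaL_checkinteger(L, 2);
    printf("network dims: %d -> %d\n", in_dim, out_dim);
    /* the propagator stays at index 3; duplicate it with lua_pushvalue(L, 3)
       before each lua_pcall so the original remains available for later calls */
    lua_close(L);
    return 0;
}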