diff options
Diffstat (limited to 'embedding_example/setup_nerv.lua')
-rw-r--r-- | embedding_example/setup_nerv.lua | 9 |
1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/embedding_example/setup_nerv.lua b/embedding_example/setup_nerv.lua
index e33a1e7..3ae878d 100644
--- a/embedding_example/setup_nerv.lua
+++ b/embedding_example/setup_nerv.lua
@@ -1,10 +1,7 @@
-package.path="/home/slhome/mfy43/.luarocks/share/lua/5.1/?.lua;/home/slhome/mfy43/.luarocks/share/lua/5.1/?/init.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?.lua;/home/slhome/mfy43/nerv/install/share/lua/5.1/?/init.lua;"..package.path
-package.cpath="/home/slhome/mfy43/.luarocks/lib/lua/5.1/?.so;/home/slhome/mfy43/nerv/install/lib/lua/5.1/?.so;"..package.cpath
 local k,l,_=pcall(require,"luarocks.loader") _=k and l.add_context("nerv","scm-1")
-
-local args = {...}
 require 'nerv'
-dofile(args[1])
+local arg = {...}
+dofile(arg[1])
 local param_repo = nerv.ParamRepo()
 param_repo:import(gconf.initialized_param, nil, gconf)
 local sublayer_repo = make_sublayer_repo(param_repo)
@@ -12,6 +9,7 @@ local layer_repo = make_layer_repo(sublayer_repo, param_repo)
 local network = get_network(layer_repo)
 local batch_size = 1
 network:init(batch_size)
+
 function propagator(input, output)
     local gpu_input = nerv.CuMatrixFloat(input:nrow(), input:ncol())
     local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol())
@@ -23,4 +21,5 @@ function propagator(input, output)
     -- collect garbage in-time to save GPU memory
     collectgarbage("collect")
 end
+
 return network.dim_in[1], network.dim_out[1], propagator