author     Determinant <[email protected]>  2015-08-04 11:11:50 +0800
committer  Determinant <[email protected]>  2015-08-04 11:11:50 +0800
commit     5b16335a903551ffef4fafa88d67146b9131a74e
tree       2691b465eb1ebb905d12b73648fda8012d844704
parent     b385d55268b7b327534e227065907a5ea2d2b731
...
-rw-r--r--  nerv/examples/asr_trainer.lua   4
-rw-r--r--  nerv/init.lua                    2
-rw-r--r--  nerv/nerv                       10
-rw-r--r--  nerv/nn/layer_dag.lua            6
4 files changed, 14 insertions, 8 deletions
diff --git a/nerv/examples/asr_trainer.lua b/nerv/examples/asr_trainer.lua
index 4fa4096..8dfb2ac 100644
--- a/nerv/examples/asr_trainer.lua
+++ b/nerv/examples/asr_trainer.lua
@@ -12,7 +12,7 @@ function build_trainer(ifname)
-- initialize the network
network:init(gconf.batch_size)
gconf.cnt = 0
- err_input = {nerv.CuMatrixFloat(256, 1)}
+ err_input = {nerv.CuMatrixFloat(gconf.batch_size, 1)}
err_input[1]:fill(1)
for data in buffer.get_data, buffer do
-- print stat periodically
@@ -32,7 +32,7 @@ function build_trainer(ifname)
end
table.insert(input, data[id])
end
- local output = {nerv.CuMatrixFloat(256, 1)}
+ local output = {nerv.CuMatrixFloat(gconf.batch_size, 1)}
err_output = {input[1]:create()}
network:propagate(input, output)
if bp then
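The two hunks above replace a hard-coded 256 with gconf.batch_size, so the dummy error-input and the output matrix are always allocated to match the configured batch size instead of silently assuming 256-frame batches. A minimal sketch of the resulting allocation, assuming the same gconf table and nerv.CuMatrixFloat constructor used above (the batch-size value is illustrative):

    -- sketch: size per-batch buffers from the configuration, not a literal 256
    gconf.batch_size = 128                                    -- illustrative value
    local err_input = {nerv.CuMatrixFloat(gconf.batch_size, 1)}
    err_input[1]:fill(1)                                      -- constant error signal, as above
    local output = {nerv.CuMatrixFloat(gconf.batch_size, 1)}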
diff --git a/nerv/init.lua b/nerv/init.lua
index 89010a7..183ae6d 100644
--- a/nerv/init.lua
+++ b/nerv/init.lua
@@ -1,7 +1,7 @@
require 'libnerv'
function nerv.error(fmt, ...)
- error(nerv.printf("[nerv] internal error: " .. fmt .. "\n", ...))
+ error("[nerv] internal error: " .. fmt .. "\n", ...)
end
function nerv.error_method_not_implemented()
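After this hunk nerv.error no longer routes the message through nerv.printf: the format string is concatenated onto the "[nerv] internal error: " prefix and the remaining arguments are handed to Lua's error() as-is. A small illustration of the call shape from a caller's side (the check and values are illustrative):

    -- sketch: callers still pass a printf-style message plus arguments
    if dim_in ~= dim_out then                     -- illustrative consistency check
        nerv.error("dimension mismatch: %d vs %d", dim_in, dim_out)
    end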
diff --git a/nerv/nerv b/nerv/nerv
index 7571659..e5943aa 100644
--- a/nerv/nerv
+++ b/nerv/nerv
@@ -1,13 +1,13 @@
#! /usr/bin/env luajit
require 'nerv'
print("Greetings")
-if #arg < 1 then
+if #args < 1 then
return
end
-local script = arg[1]
+local script = args[1]
local script_arg = {}
-for i = 2, #arg do
- table.insert(script_arg, arg[i])
+for i = 2, #args do
+ table.insert(script_arg, args[i])
end
-arg = script_arg
+args = script_arg
dofile(script)
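This hunk renames the launcher's argument table: entry 1 is consumed as the script path, the remaining entries are copied into script_arg, and that table is re-published as the global args before dofile() runs the script. A sketch of a downstream script run through the launcher, assuming args is populated exactly as the hunk shows (the file name and error text are illustrative):

    -- sketch: invoked as `nerv train.lua config.lua`
    -- the launcher consumed "train.lua", so args[1] here is "config.lua"
    local config = args[1]
    if config == nil then
        nerv.error("no configuration file given")
    end
    dofile(config)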
diff --git a/nerv/nn/layer_dag.lua b/nerv/nn/layer_dag.lua
index 8e30216..e5c1ac7 100644
--- a/nerv/nn/layer_dag.lua
+++ b/nerv/nn/layer_dag.lua
@@ -177,6 +177,9 @@ end
function DAGLayer:set_inputs(input)
for i = 1, #self.dim_in do
+ if input[i] == nil then
+ nerv.error("some input is not provided");
+ end
local layer = self.inputs[i][1]
local port = self.inputs[i][2]
layer.inputs[port] = input[i]
@@ -185,6 +188,9 @@ end
function DAGLayer:set_outputs(output)
for i = 1, #self.dim_out do
+ if output[i] == nil then
+ nerv.error("some output is not provided");
+ end
local layer = self.outputs[i][1]
local port = self.outputs[i][2]
layer.outputs[port] = output[i]
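The two new guards turn a missing entry in the caller's input or output table into an immediate nerv.error instead of a later nil-index failure inside a member layer. A minimal illustration of the case set_inputs now catches, assuming dag is a DAGLayer with two declared input ports (the dimension is illustrative):

    -- sketch: only one of the two expected input matrices is supplied,
    -- so set_inputs() raises "some input is not provided" right away
    local input = {nerv.CuMatrixFloat(gconf.batch_size, 429)}  -- input[2] missing
    dag:set_inputs(input)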