author      Determinant <ted.sybil@gmail.com>  2015-08-04 15:51:53 +0800
committer   Determinant <ted.sybil@gmail.com>  2015-08-04 15:51:53 +0800
commit      e20b60f659b08c46b9da0591ee489803f3f3d300 (patch)
tree        f751af1bbe04d34eb85281b148d4c549b84ab463
parent      0223b6b88620d9235fc47818aaa5c86ae81d38d9 (diff)
...
-rw-r--r--  nerv/layer/softmax.lua  31
-rw-r--r--  nerv/nerv               10
2 files changed, 36 insertions(+), 5 deletions(-)
diff --git a/nerv/layer/softmax.lua b/nerv/layer/softmax.lua
new file mode 100644
index 0000000..e979ebf
--- /dev/null
+++ b/nerv/layer/softmax.lua
@@ -0,0 +1,31 @@
+local SoftmaxLayer = nerv.class("nerv.SoftmaxLayer", "nerv.Layer")
+
+function SoftmaxLayer:__init(id, global_conf, layer_conf)
+ self.id = id
+ self.gconf = global_conf
+ self.dim_in = layer_conf.dim_in
+ self.dim_out = layer_conf.dim_out
+ self:check_dim_len(1, 1) -- exactly one input and one output
+end
+
+function SoftmaxLayer:init(batch_size)
+ if self.dim_in[1] ~= self.dim_out[1] then
+ nerv.error("mismatching dimensions of input and output")
+ end
+end
+
+function SoftmaxLayer:update(bp_err, input, output)
+ -- no params, therefore do nothing
+end
+
+function SoftmaxLayer:propagate(input, output)
+ output[1]:softmax(input[1]) -- row-wise softmax computed by the matrix backend
+end
+
+function SoftmaxLayer:back_propagate(bp_err, next_bp_err, input, output)
+ nerv.error_method_not_implemented()
+end
+
+function SoftmaxLayer:get_params()
+ return nerv.ParamRepo({}) -- empty repo: softmax has no trainable parameters
+end
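
The new layer computes softmax(x)_j = exp(x_j) / sum_k exp(x_k) over each row, delegating the actual computation to the matrix type. A minimal sketch of how the layer might be driven, assuming a nerv.CuMatrixFloat constructor and a gconf table as seen elsewhere in the tree (both are assumptions for illustration, not part of this patch):

    -- hypothetical usage sketch; nerv.CuMatrixFloat and gconf are assumed
    local layer = nerv.SoftmaxLayer("softmax0", gconf,
                                    {dim_in = {10}, dim_out = {10}})
    layer:init(256) -- batch size; input and output dims must match
    -- inputs and outputs are passed as arrays of matrices, one per port
    local input  = {nerv.CuMatrixFloat(256, 10)}
    local output = {nerv.CuMatrixFloat(256, 10)}
    layer:propagate(input, output) -- fills output[1] with the row-wise softmax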
diff --git a/nerv/nerv b/nerv/nerv
index e5943aa..7571659 100644
--- a/nerv/nerv
+++ b/nerv/nerv
@@ -1,13 +1,13 @@
#! /usr/bin/env luajit
require 'nerv'
print("Greetings")
-if #args < 1 then
+if #arg < 1 then
return
end
-local script = args[1]
+local script = arg[1]
local script_arg = {}
-for i = 2, #args do
- table.insert(script_arg, args[i])
+for i = 2, #arg do
+ table.insert(script_arg, arg[i])
end
-args = script_arg
+arg = script_arg
dofile(script)
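
The launcher fix above is needed because standalone Lua/LuaJIT exposes command-line arguments through a global table named arg (arg[0] holds the script path, arg[1] the first argument); there is no global called args, so the old code always indexed nil. Rebinding arg before dofile(script) lets the user script read its own arguments under the usual name. A behavior sketch (the script name is illustrative):

    -- given: nerv train.lua foo bar
    -- inside train.lua, executed via dofile(script) after the rebinding:
    print(arg[1], arg[2]) --> foo   bar
    print(#arg)           --> 2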