author     Determinant <[email protected]>    2015-08-04 15:51:53 +0800
committer  Determinant <[email protected]>    2015-08-04 15:51:53 +0800
commit     e20b60f659b08c46b9da0591ee489803f3f3d300 (patch)
tree       f751af1bbe04d34eb85281b148d4c549b84ab463
parent     0223b6b88620d9235fc47818aaa5c86ae81d38d9 (diff)
...
-rw-r--r--  nerv/layer/softmax.lua  31
-rw-r--r--  nerv/nerv               10
2 files changed, 36 insertions, 5 deletions
diff --git a/nerv/layer/softmax.lua b/nerv/layer/softmax.lua
new file mode 100644
index 0000000..e979ebf
--- /dev/null
+++ b/nerv/layer/softmax.lua
@@ -0,0 +1,31 @@
+local SoftmaxLayer = nerv.class("nerv.SoftmaxLayer", "nerv.Layer")
+
+function SoftmaxLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.gconf = global_conf
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self:check_dim_len(1, 1) -- one input, one output
+end
+
+function SoftmaxLayer:init(batch_size)
+    if self.dim_in[1] ~= self.dim_out[1] then
+        nerv.error("mismatching dimensions of input and output")
+    end
+end
+
+function SoftmaxLayer:update(bp_err, input, output)
+    -- no params, therefore do nothing
+end
+
+function SoftmaxLayer:propagate(input, output)
+    output[1]:softmax(input[1])
+end
+
+function SoftmaxLayer:back_propagate(bp_err, next_bp_err, input, output)
+    nerv.error_method_not_implemented()
+end
+
+function SoftmaxLayer:get_params()
+    return nerv.ParamRepo({})
+end
diff --git a/nerv/nerv b/nerv/nerv
--- a/nerv/nerv
+++ b/nerv/nerv
@@ -1,13 +1,13 @@
 #! /usr/bin/env luajit
 require 'nerv'
 print("Greetings")
-if #args < 1 then
+if #arg < 1 then
     return
 end
-local script = args[1]
+local script = arg[1]
 local script_arg = {}
-for i = 2, #args do
-    table.insert(script_arg, args[i])
+for i = 2, #arg do
+    table.insert(script_arg, arg[i])
 end
-args = script_arg
+arg = script_arg
 dofile(script)
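
For context, the new nerv.SoftmaxLayer follows the layer interface visible in the diff above: __init(id, global_conf, layer_conf) reading dim_in/dim_out, init(batch_size) checking that the single input and output dimensions match, and propagate() operating on arrays of matrices. Below is a minimal usage sketch, not taken from this commit; it assumes that classes created with nerv.class are constructed by calling the class, and the dimension value, the contents of global_conf, and the matrix constructor mentioned in the comments are all illustrative assumptions.

-- Minimal sketch (assumptions noted above and in the comments).
require 'nerv'

local gconf = {}                     -- global_conf; any fields it may need are an assumption here
local softmax = nerv.SoftmaxLayer("softmax0", gconf,
                                   {dim_in = {429}, dim_out = {429}})  -- 429 is an arbitrary example dimension
softmax:init(256)                    -- batch_size; init only checks dim_in[1] == dim_out[1]

-- propagate() takes arrays of matrices; any matrix class providing a
-- :softmax() method would work. The constructor below is hypothetical:
--   local input  = {gconf.cumat_type(256, 429)}
--   local output = {gconf.cumat_type(256, 429)}
--   softmax:propagate(input, output)  -- output[1] becomes the row-wise softmax of input[1]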
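The second hunk fixes the launcher script: standard Lua and LuaJIT expose command-line arguments through the global table arg (with arg[1] as the first argument after the chunk name), not args, so the original code always indexed a nil global. The shift-and-reassign idiom then makes the dofile'd script see only its own arguments. A standalone illustration with hypothetical values follows; the script and argument names are made up for the example.

-- Hypothetical invocation:  nerv train.lua config.yaml
arg = {"train.lua", "config.yaml"}   -- what LuaJIT would hand to the launcher
local script = arg[1]                -- "train.lua"
local script_arg = {}
for i = 2, #arg do
    table.insert(script_arg, arg[i]) -- collect everything after the script name
end
arg = script_arg                     -- the wrapped script now sees {"config.yaml"}
print(script, arg[1])                -- -> train.lua    config.yaml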