aboutsummaryrefslogtreecommitdiff
path: root/nerv/layer/softmax.lua
diff options
context:
space:
mode:
authorDeterminant <ted.sybil@gmail.com>2015-08-04 15:51:53 +0800
committerDeterminant <ted.sybil@gmail.com>2015-08-04 15:51:53 +0800
commite20b60f659b08c46b9da0591ee489803f3f3d300 (patch)
treef751af1bbe04d34eb85281b148d4c549b84ab463 /nerv/layer/softmax.lua
parent0223b6b88620d9235fc47818aaa5c86ae81d38d9 (diff)
...
Diffstat (limited to 'nerv/layer/softmax.lua')
-rw-r--r--nerv/layer/softmax.lua31
1 files changed, 31 insertions, 0 deletions
diff --git a/nerv/layer/softmax.lua b/nerv/layer/softmax.lua
new file mode 100644
index 0000000..e979ebf
--- /dev/null
+++ b/nerv/layer/softmax.lua
@@ -0,0 +1,31 @@
-- Softmax activation layer: normalizes each input row into a probability
-- distribution. Registered as "nerv.SoftmaxLayer", derived from "nerv.Layer".
local SoftmaxLayer = nerv.class("nerv.SoftmaxLayer", "nerv.Layer")
+
--- Construct a SoftmaxLayer.
-- @param id unique string identifier of this layer instance
-- @param global_conf global configuration table shared by all layers
-- @param layer_conf layer-specific configuration; must carry dim_in and
--        dim_out arrays (each with exactly one entry for this layer)
function SoftmaxLayer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1) -- exactly one input and one output
end
+
--- Validate layer geometry before training/propagation begins.
-- Softmax maps a vector to an equally-sized vector, so the single input
-- dimension must equal the single output dimension.
-- @param batch_size rows per mini-batch (not used by this layer)
function SoftmaxLayer:init(batch_size)
    local din = self.dim_in[1]
    local dout = self.dim_out[1]
    if din ~= dout then
        nerv.error("mismatching dimensions of input and output")
    end
end
+
--- Parameter update step (part of the nerv.Layer interface).
-- Softmax has no trainable parameters, so this is intentionally a no-op.
function SoftmaxLayer:update(bp_err, input, output)
    -- no params, therefore do nothing
end
+
--- Forward pass: compute the softmax of the input.
-- Delegates to the matrix object's softmax method, writing the result of
-- input[1] into output[1].
function SoftmaxLayer:propagate(input, output)
    local src = input[1]
    local dst = output[1]
    dst:softmax(src)
end
+
--- Backward pass is deliberately unsupported.
-- In this toolkit softmax is normally fused with the cross-entropy loss
-- (which has a simpler combined gradient); a standalone softmax gradient
-- is not implemented, so any attempt to backprop through this layer raises.
function SoftmaxLayer:back_propagate(bp_err, next_bp_err, input, output)
    nerv.error_method_not_implemented()
end
+
--- Report this layer's trainable parameters.
-- Softmax is parameter-free, so an empty parameter repository is returned.
-- @return an empty nerv.ParamRepo
function SoftmaxLayer:get_params()
    local empty_repo = nerv.ParamRepo({})
    return empty_repo
end