Diffstat (limited to 'layer/sigmoid.lua')
-rw-r--r--  layer/sigmoid.lua  31
1 file changed, 0 insertions, 31 deletions
diff --git a/layer/sigmoid.lua b/layer/sigmoid.lua
deleted file mode 100644
index dfd09eb..0000000
--- a/layer/sigmoid.lua
+++ /dev/null
@@ -1,31 +0,0 @@
-local SigmoidLayer = nerv.class("nerv.SigmoidLayer", "nerv.Layer")
-
-function SigmoidLayer:__init(id, global_conf, layer_conf)
-    self.id = id
-    self.gconf = global_conf
-    self.dim_in = layer_conf.dim_in
-    self.dim_out = layer_conf.dim_out
-    self:check_dim_len(1, 1)
-end
-
-function SigmoidLayer:init()
-    if self.dim_in[1] ~= self.dim_out[1] then
-        nerv.error("mismatching dimensions of input and output")
-    end
-end
-
-function SigmoidLayer:update(bp_err, input, output)
-    -- no params, therefore do nothing
-end
-
-function SigmoidLayer:propagate(input, output)
-    output[1]:sigmoid(input[1])
-end
-
-function SigmoidLayer:back_propagate(bp_err, next_bp_err, input, output)
-    next_bp_err[1]:sigmoid_grad(bp_err[1], output[1])
-end
-
-function SigmoidLayer:get_params()
-    return nerv.ParamRepo({})
-end
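
For reference, the two matrix kernels invoked above implement the standard logistic function and its derivative. The following is a minimal plain-Lua sketch of that elementwise math, using stand-in functions on flat Lua tables; it only illustrates what the real sigmoid and sigmoid_grad matrix methods compute, which are provided by NERV's matrix backend rather than defined in this file.

-- Stand-in, elementwise versions of the two kernels (illustrative only).

-- Forward: y = 1 / (1 + exp(-x)) for every element.
local function sigmoid(x)
    local y = {}
    for i = 1, #x do
        y[i] = 1 / (1 + math.exp(-x[i]))
    end
    return y
end

-- Backward: dy/dx = y * (1 - y), so the error handed to the previous
-- layer is err * y * (1 - y), built from the cached forward output.
local function sigmoid_grad(err, y)
    local prev_err = {}
    for i = 1, #y do
        prev_err[i] = err[i] * y[i] * (1 - y[i])
    end
    return prev_err
end

-- Quick check: sigmoid(0) = 0.5, and its gradient under unit error is 0.25.
local y = sigmoid({0.0})
local g = sigmoid_grad({1.0}, y)
print(y[1], g[1])  -- 0.5   0.25

This is also why back_propagate reads the layer's output instead of its input: the derivative of the logistic function is expressible through the forward activation alone, so nothing has to be recomputed on the backward pass.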
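
A hypothetical construction sketch follows, showing how the class would be wired up; the id "sigmoid0", the empty gconf table, and the dimension 429 are invented example values, not taken from this commit.

-- Hypothetical usage; all concrete values here are assumptions.
local gconf = {}  -- global configuration table (contents not shown in this commit)
local layer = nerv.SigmoidLayer("sigmoid0", gconf,
                                {dim_in = {429}, dim_out = {429}})
layer:init()  -- succeeds because dim_in[1] == dim_out[1]
-- The layer then maps arrays of matrices to arrays of matrices:
--   layer:propagate({x}, {y})                   -- y = sigmoid(x)
--   layer:back_propagate({dy}, {dx}, {x}, {y})  -- dx = dy * y * (1 - y)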
-end