path: root/nerv/layer/sigmoid.lua
Diffstat (limited to 'nerv/layer/sigmoid.lua')
-rw-r--r--  nerv/layer/sigmoid.lua | 17 +++++++++--------
1 file changed, 9 insertions(+), 8 deletions(-)
diff --git a/nerv/layer/sigmoid.lua b/nerv/layer/sigmoid.lua
index 0a8bcdc..5974ffc 100644
--- a/nerv/layer/sigmoid.lua
+++ b/nerv/layer/sigmoid.lua
@@ -1,19 +1,20 @@
 local SigmoidLayer = nerv.class("nerv.SigmoidLayer", "nerv.Layer")
 
 function SigmoidLayer:__init(id, global_conf, layer_conf)
-    self.id = id
-    self.gconf = global_conf
-    self.dim_in = layer_conf.dim_in
-    self.dim_out = layer_conf.dim_out
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
     self:check_dim_len(1, 1)
-end
-
-function SigmoidLayer:init()
     if self.dim_in[1] ~= self.dim_out[1] then
         nerv.error("mismatching dimensions of input and output")
     end
 end
 
+function SigmoidLayer:bind_params()
+    -- do nothing
+end
+
+function SigmoidLayer:init()
+end
+
 function SigmoidLayer:batch_resize(batch_size)
     -- do nothing
 end
@@ -31,5 +32,5 @@ function SigmoidLayer:back_propagate(bp_err, next_bp_err, input, output)
 end
 
 function SigmoidLayer:get_params()
-    return nerv.ParamRepo({})
+    return nerv.ParamRepo({}, self.loc_type)
 end
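
For reference, a minimal usage sketch of the layer after this patch. The id "sigmoid0" and the 429-unit dimensions are made-up illustration values, and gconf stands for a nerv global_conf table constructed elsewhere; only the call shapes come from the diff above.

    -- hypothetical construction of the refactored layer
    local layer = nerv.SigmoidLayer("sigmoid0", gconf,
                                    {dim_in = {429}, dim_out = {429}})
    layer:bind_params()             -- no-op: a sigmoid layer holds no parameters
    layer:init()                    -- no-op after the refactor
    local repo = layer:get_params() -- empty ParamRepo tagged with self.loc_type

Note the design choice visible in the hunk: the dimension check moves from init() into __init(), so a mismatched dim_in/dim_out now fails at construction time rather than at network initialization, while the shared id/gconf/dim bookkeeping is delegated to the nerv.Layer base class.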