author     Determinant <[email protected]>    2016-03-10 13:40:11 +0800
committer  Determinant <[email protected]>    2016-03-10 13:40:11 +0800
commit     a32195e3e2ae9ca0f0c7a82e73e6bddb64568c05 (patch)
tree       a19f21f8cbadecff7357f9a102f160f5fe699b65 /nerv/layer/sigmoid.lua
parent     4a6872601f05e9ecc059f83fb64a0a4887992b99 (diff)
major change: clearer param binding semantics; permit rebinding; enable resuming from previous training
Diffstat (limited to 'nerv/layer/sigmoid.lua')
-rw-r--r--   nerv/layer/sigmoid.lua   11
1 file changed, 6 insertions, 5 deletions
diff --git a/nerv/layer/sigmoid.lua b/nerv/layer/sigmoid.lua
index 0a8bcdc..a9f9749 100644
--- a/nerv/layer/sigmoid.lua
+++ b/nerv/layer/sigmoid.lua
@@ -1,13 +1,14 @@
 local SigmoidLayer = nerv.class("nerv.SigmoidLayer", "nerv.Layer")
 
 function SigmoidLayer:__init(id, global_conf, layer_conf)
-    self.id = id
-    self.gconf = global_conf
-    self.dim_in = layer_conf.dim_in
-    self.dim_out = layer_conf.dim_out
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
     self:check_dim_len(1, 1)
 end
 
+function SigmoidLayer:bind_params()
+    -- do nothing
+end
+
 function SigmoidLayer:init()
     if self.dim_in[1] ~= self.dim_out[1] then
         nerv.error("mismatching dimensions of input and output")
@@ -31,5 +32,5 @@ function SigmoidLayer:back_propagate(bp_err, next_bp_err, input, output)
 end
 
 function SigmoidLayer:get_params()
-    return nerv.ParamRepo({})
+    return nerv.ParamRepo({}, self.loc_type)
 end
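The change is mechanical here because a sigmoid has no trainable parameters: construction is delegated to the nerv.Layer base constructor, the new bind_params hook is a no-op, and the empty ParamRepo now carries the layer's loc_type. For a layer that does own parameters, the same hooks would do real work. The following is a minimal sketch only; it assumes a find_param helper on nerv.Layer, a self.lconf field set by the base constructor, and parameter classes named nerv.LinearTransParam and nerv.BiasParam, none of which appear in this diff.

-- Hypothetical affine-style layer; every name not shown in the diff above
-- (find_param, self.lconf, LinearTransParam, BiasParam) is an assumption.
local AffineSketch = nerv.class("nerv.AffineSketch", "nerv.Layer")

function AffineSketch:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1)
    self:bind_params()                -- bind once at construction
end

function AffineSketch:bind_params()
    -- Looks parameters up by id; calling this again after swapping in a
    -- different ParamRepo rebinds the layer, which is what allows resuming
    -- from a previously saved training state without rebuilding the network.
    self.ltp = self:find_param("ltp", self.lconf, self.gconf,
                               nerv.LinearTransParam,
                               {self.dim_in[1], self.dim_out[1]})
    self.bp = self:find_param("bp", self.lconf, self.gconf,
                              nerv.BiasParam, {1, self.dim_out[1]})
end

function AffineSketch:get_params()
    -- loc_type records where the repo's parameters live (host vs. device),
    -- mirroring the nerv.ParamRepo({}, self.loc_type) change above.
    return nerv.ParamRepo({self.ltp, self.bp}, self.loc_type)
end

Under this scheme the sigmoid's no-op bind_params is not dead code: it lets the framework walk every layer and rebind uniformly, parameterized or not.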