path: root/nerv/layer/sigmoid.lua
author    Determinant <ted.sybil@gmail.com>  2016-03-11 17:35:13 +0800
committer Determinant <ted.sybil@gmail.com>  2016-03-11 17:35:13 +0800
commit    13729e83219cd90e33f329c49a50f6f4a4420721 (patch)
tree      cf5c43f1ddad7bc2430ea8191f943b0783e5fc2c /nerv/layer/sigmoid.lua
parent    a32195e3e2ae9ca0f0c7a82e73e6bddb64568c05 (diff)
parent    a54332ce81129e81fbb1d041ec41aa5955868c5e (diff)
Merge branch 'liuq901-master'
Diffstat (limited to 'nerv/layer/sigmoid.lua')
-rw-r--r-- nerv/layer/sigmoid.lua | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/nerv/layer/sigmoid.lua b/nerv/layer/sigmoid.lua
index a9f9749..5974ffc 100644
--- a/nerv/layer/sigmoid.lua
+++ b/nerv/layer/sigmoid.lua
@@ -3,6 +3,9 @@ local SigmoidLayer = nerv.class("nerv.SigmoidLayer", "nerv.Layer")
 function SigmoidLayer:__init(id, global_conf, layer_conf)
     nerv.Layer.__init(self, id, global_conf, layer_conf)
     self:check_dim_len(1, 1)
+    if self.dim_in[1] ~= self.dim_out[1] then
+        nerv.error("mismatching dimensions of input and output")
+    end
 end
 
 function SigmoidLayer:bind_params()
@@ -10,9 +13,6 @@ function SigmoidLayer:bind_params()
 end
 
 function SigmoidLayer:init()
-    if self.dim_in[1] ~= self.dim_out[1] then
-        nerv.error("mismatching dimensions of input and output")
-    end
 end
 
 function SigmoidLayer:batch_resize(batch_size)
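
In effect, this merge moves the input/output dimension check from SigmoidLayer:init() into the constructor, so a mismatched configuration is reported as soon as the layer object is built rather than later at init time. A minimal usage sketch of that behavior follows; the gconf placeholder, the concrete dimension values, and the assumption that dim_in/dim_out are carried in layer_conf are illustrative guesses based on the constructor signature above, not taken from this page:

-- Hypothetical sketch: constructing a SigmoidLayer whose input and
-- output widths disagree. After this commit, nerv.error() fires inside
-- __init, instead of only when layer:init() is eventually called.
local gconf = {}                                       -- assumed global config placeholder
local layer_conf = {dim_in = {429}, dim_out = {2048}}  -- deliberately mismatched dims
local ok, err = pcall(function()
    return nerv.SigmoidLayer("sigmoid0", gconf, layer_conf)
end)
-- ok == false; err carries "mismatching dimensions of input and output"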