Diffstat (limited to 'layer')
-rw-r--r--  layer/init.lua       | 2 +-
-rw-r--r--  layer/softmax_ce.lua | 2 +-
2 files changed, 2 insertions, 2 deletions
diff --git a/layer/init.lua b/layer/init.lua
index 3011f8e..844f46b 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -29,7 +29,7 @@ function Layer:__init(id, global_conf, layer_conf)
     nerv.error_method_not_implemented()
 end
 
-function Layer:init(id)
+function Layer:init()
     nerv.error_method_not_implemented()
 end
 
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
index 4147f0c..2e1f5fb 100644
--- a/layer/softmax_ce.lua
+++ b/layer/softmax_ce.lua
@@ -13,7 +13,7 @@ function SoftmaxCELayer:__init(id, global_conf, layer_conf)
 end
 
 function SoftmaxCELayer:init()
-    if self.dim_in[1] ~= self.dim_in[1] then
+    if not self.compressed and (self.dim_in[1] ~= self.dim_in[2]) then
         nerv.error("mismatching dimensions of previous network output and labels")
     end
     self.total_ce = 0.0
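The substantive fix is in SoftmaxCELayer:init(): the old guard compared self.dim_in[1] to itself, a condition that can never be true, so a mismatch between the network output width and the label width was silently accepted. The new guard compares the two input dimensions and skips the check when self.compressed is set (presumably labels given as class indices, whose width need not match the output; that reading is inferred from the flag name, not stated in the diff). The init.lua hunk simply drops the unused id parameter from the abstract Layer:init() stub, matching concrete implementations such as SoftmaxCELayer:init(). A minimal standalone illustration in plain Lua, with hypothetical widths:

    -- Hypothetical dims: network output width 10, label width 7.
    local dim_in = {10, 7}
    print(dim_in[1] ~= dim_in[1])  --> false: old guard never fires
    print(dim_in[1] ~= dim_in[2])  --> true:  new guard flags the mismatch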