diff options
author | Determinant <[email protected]> | 2015-06-09 17:27:09 +0800 |
---|---|---|
committer | Determinant <[email protected]> | 2015-06-09 17:27:09 +0800 |
commit | 480de75f52a4f185546978da023a77e27a8644c3 (patch) | |
tree | 8802553466b18503b255944ef4f30fa81157fe03 | |
parent | 8a8dcb6c6bcdcd96984d8a9ecf9032287beeb76d (diff) |
fix #20
-rw-r--r-- | layer/init.lua | 2 | ||||
-rw-r--r-- | layer/softmax_ce.lua | 2 |
2 files changed, 2 insertions, 2 deletions
diff --git a/layer/init.lua b/layer/init.lua
index 3011f8e..844f46b 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -29,7 +29,7 @@ function Layer:__init(id, global_conf, layer_conf)
     nerv.error_method_not_implemented()
 end
 
-function Layer:init(id)
+function Layer:init()
     nerv.error_method_not_implemented()
 end
 
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
index 4ea7421..2e1f5fb 100644
--- a/layer/softmax_ce.lua
+++ b/layer/softmax_ce.lua
@@ -13,7 +13,7 @@ function SoftmaxCELayer:__init(id, global_conf, layer_conf)
 end
 
 function SoftmaxCELayer:init()
-    if self.dim_in[1] ~= self.dim_in[2] then
+    if not self.compressed and (self.dim_in[1] ~= self.dim_in[2]) then
         nerv.error("mismatching dimensions of previous network output and labels")
     end
     self.total_ce = 0.0