From 480de75f52a4f185546978da023a77e27a8644c3 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Tue, 9 Jun 2015 17:27:09 +0800
Subject: fix #20

---
 layer/init.lua       | 2 +-
 layer/softmax_ce.lua | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/layer/init.lua b/layer/init.lua
index 3011f8e..844f46b 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -29,7 +29,7 @@ function Layer:__init(id, global_conf, layer_conf)
     nerv.error_method_not_implemented()
 end
 
-function Layer:init(id)
+function Layer:init()
     nerv.error_method_not_implemented()
 end
 
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
index 4ea7421..2e1f5fb 100644
--- a/layer/softmax_ce.lua
+++ b/layer/softmax_ce.lua
@@ -13,7 +13,7 @@ function SoftmaxCELayer:__init(id, global_conf, layer_conf)
 end
 
 function SoftmaxCELayer:init()
-    if self.dim_in[1] ~= self.dim_in[2] then
+    if not self.compressed and (self.dim_in[1] ~= self.dim_in[2]) then
         nerv.error("mismatching dimensions of previous network output and labels")
     end
     self.total_ce = 0.0
-- 
cgit v1.2.3
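
For context, a minimal sketch of the guarded check this patch introduces in SoftmaxCELayer:init(). It assumes `compressed` means the label input is given as class indices (a single column) rather than one-hot vectors, so its width legitimately differs from the network output width; that interpretation and the helper below are illustrative, not part of the patch.

```lua
-- Sketch only: mirrors the conditional added in softmax_ce.lua.
-- `dim_in` holds the widths of the two inputs (network output, labels);
-- `compressed` is assumed to mark index-style labels.
local function check_label_dims(dim_in, compressed)
    -- the width check only makes sense for one-hot labels
    if not compressed and (dim_in[1] ~= dim_in[2]) then
        error("mismatching dimensions of previous network output and labels")
    end
end

check_label_dims({10, 10}, false) -- one-hot labels: widths must agree
check_label_dims({10, 1}, true)   -- compressed labels: check is skipped
-- check_label_dims({10, 1}, false) -- would raise the mismatch error
```

With the patch, layers configured for compressed labels no longer fail this sanity check, while the original behavior for one-hot labels is unchanged.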