Diffstat (limited to 'nerv/layer/softmax_ce.lua')
 nerv/layer/softmax_ce.lua | 16 ++++++----------
 1 file changed, 6 insertions(+), 10 deletions(-)
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
index d7d650e..7b4a80c 100644
--- a/nerv/layer/softmax_ce.lua
+++ b/nerv/layer/softmax_ce.lua
@@ -1,15 +1,7 @@
local SoftmaxCELayer = nerv.class("nerv.SoftmaxCELayer", "nerv.Layer")
function SoftmaxCELayer:__init(id, global_conf, layer_conf)
- self.id = id
- self.gconf = global_conf
- if self.gconf.use_cpu then
- self.mat_type = self.gconf.mmat_type
- else
- self.mat_type = self.gconf.cumat_type
- end
- self.dim_in = layer_conf.dim_in
- self.dim_out = layer_conf.dim_out
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
self.compressed = layer_conf.compressed
if self.compressed == nil then
self.compressed = false
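
The nine lines deleted above were per-layer boilerplate; the commit replaces them with a single call to the nerv.Layer base constructor. A minimal sketch of what that constructor presumably contains after this refactor, reconstructed from the deleted lines (the lconf and loc_type assignments are assumptions inferred from the other hunks of this commit, and the LOC_TYPES names are hypothetical):

function nerv.Layer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.lconf = layer_conf -- assumed: stored for later parameter binding
    if self.gconf.use_cpu then
        self.mat_type = self.gconf.mmat_type
        -- assumed: host-side parameter location, consumed by ParamRepo
        self.loc_type = nerv.ParamRepo.LOC_TYPES.ON_HOST
    else
        self.mat_type = self.gconf.cumat_type
        -- assumed: device-side parameter location, consumed by ParamRepo
        self.loc_type = nerv.ParamRepo.LOC_TYPES.ON_DEVICE
    end
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
end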
@@ -17,6 +9,10 @@ function SoftmaxCELayer:__init(id, global_conf, layer_conf)
self:check_dim_len(2, -1) -- two inputs: nn output and label
end
+function SoftmaxCELayer:bind_params()
+ -- do nothing
+end
+
function SoftmaxCELayer:init(batch_size, chunk_size)
if not self.compressed and (self.dim_in[1] ~= self.dim_in[2]) then
nerv.error("mismatching dimensions of previous network output and labels")
@@ -94,5 +90,5 @@ function SoftmaxCELayer:back_propagate(bp_err, next_bp_err, input, output, t)
end
function SoftmaxCELayer:get_params()
- return nerv.ParamRepo({})
+ return nerv.ParamRepo({}, self.loc_type)
end
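
SoftmaxCELayer owns no trainable parameters, so bind_params is a no-op and get_params returns an empty ParamRepo; the new self.loc_type argument presumably tells the repo whether its parameters live in host or device memory. For contrast, a hedged sketch of how a layer that does own parameters would implement the same pair of hooks (AffineLayer, find_param and its signature, and the ltp field are illustrative assumptions, not part of this commit):

-- hypothetical parameterized layer, for contrast with the no-op above
function AffineLayer:bind_params()
    -- look up (or create) the linear transform parameter by id;
    -- find_param and its argument order are assumed here
    self.ltp = self:find_param("ltp", self.lconf, self.gconf,
                               nerv.LinearTransParam,
                               {self.dim_in[1], self.dim_out[1]})
end

function AffineLayer:get_params()
    -- same constructor form as the hunk above: param list plus location type
    return nerv.ParamRepo({self.ltp}, self.loc_type)
end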