Diffstat (limited to 'nerv/layer/softmax_ce.lua')
-rw-r--r--  nerv/layer/softmax_ce.lua | 11 ++++++++---
1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
index 9071e86..31a2ad7 100644
--- a/nerv/layer/softmax_ce.lua
+++ b/nerv/layer/softmax_ce.lua
@@ -3,6 +3,11 @@ local SoftmaxCELayer = nerv.class("nerv.SoftmaxCELayer", "nerv.Layer")
 function SoftmaxCELayer:__init(id, global_conf, layer_conf)
     self.id = id
     self.gconf = global_conf
+    if self.gconf.use_cpu then
+        self.mat_type = self.gconf.mmat_type
+    else
+        self.mat_type = self.gconf.cumat_type
+    end
     self.dim_in = layer_conf.dim_in
     self.dim_out = layer_conf.dim_out
     self.compressed = layer_conf.compressed
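
With this hunk the layer chooses its matrix class once from global_conf instead of hard-coding CUDA matrices. A minimal sketch of a configuration that drives the new branch, assuming nerv.MMatrixFloat and nerv.CuMatrixFloat as the host and CUDA matrix classes (those names and the dimensions are illustrative, not taken from this diff):

    -- hypothetical global_conf selecting the CPU backend
    local gconf = {
        use_cpu    = true,                -- flip to false to stay on the GPU
        mmat_type  = nerv.MMatrixFloat,   -- host matrix class (assumed name)
        cumat_type = nerv.CuMatrixFloat,  -- CUDA matrix class (assumed name)
    }
    -- __init then sets self.mat_type = gconf.mmat_type, so every buffer the
    -- layer allocates below (softmax, ce) lives in host memory
    local layer = nerv.SoftmaxCELayer("sce0", gconf,
                                      {dim_in = {429, 429}, dim_out = {1}})
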
@@ -19,13 +24,13 @@ function SoftmaxCELayer:init(batch_size)
     self.total_ce = 0.0
     self.total_correct = 0
     self.total_frames = 0
-    self.softmax = self.gconf.cumat_type(batch_size, self.dim_in[1])
+    self.softmax = self.mat_type(batch_size, self.dim_in[1])
     self.ce = self.softmax:create()
 end
 
 function SoftmaxCELayer:batch_resize(batch_size)
     if self.softmax:nrow() ~= batch_size then
-        self.softmax = self.gconf.cumat_type(batch_size, self.dim_in[1])
+        self.softmax = self.mat_type(batch_size, self.dim_in[1])
         self.ce = self.softmax:create()
     end
 end
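
init and batch_resize now allocate through self.mat_type, so the (re)allocation logic is identical on both backends: rebuild softmax, then derive ce from it. The shared pattern as a standalone helper, for illustration only (the name ensure_buffers is hypothetical, not part of the diff):

    -- hypothetical helper capturing the pattern used by init/batch_resize:
    -- (re)allocate the per-batch buffers only when the requested batch size
    -- differs from the current row count of self.softmax
    local function ensure_buffers(self, batch_size)
        if self.softmax == nil or self.softmax:nrow() ~= batch_size then
            self.softmax = self.mat_type(batch_size, self.dim_in[1])
            self.ce = self.softmax:create() -- same shape and backend as softmax
        end
    end

init is the unconditional first allocation; batch_resize is the guarded variant of the same two lines.
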
@@ -46,7 +51,7 @@ function SoftmaxCELayer:propagate(input, output)
     ce:mul_elem(ce, label)
     ce = ce:rowsum()
     if output[1] ~= nil then
-        output[1]:copy_fromd(ce)
+        output[1]:copy_from(ce)
     end
     -- add total ce
     self.total_ce = self.total_ce - ce:colsum()[0][0]
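
The propagate hunk swaps the device-only copy_fromd for the generic copy_from, which is what lets the same code export the per-frame loss when output[1] is a host matrix. A usage sketch, reusing the layer from the sketch above and assuming copy_from exists with this signature on both matrix classes (the buffer names are hypothetical):

    -- sketch: export the per-frame cross-entropy through the generic copy
    local batch_size = 128                    -- hypothetical batch size
    local out = layer.mat_type(batch_size, 1) -- one CE value per frame
    out:copy_from(layer.ce)                   -- replaces the CUDA-only copy_fromd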