Diffstat (limited to 'nerv/layer/combiner.lua')
-rw-r--r--    nerv/layer/combiner.lua    13
1 file changed, 9 insertions, 4 deletions
diff --git a/nerv/layer/combiner.lua b/nerv/layer/combiner.lua
index 1bcfdfb..22e89a9 100644
--- a/nerv/layer/combiner.lua
+++ b/nerv/layer/combiner.lua
@@ -6,6 +6,11 @@ function CombinerLayer:__init(id, global_conf, layer_conf)
self.dim_in = layer_conf.dim_in
self.dim_out = layer_conf.dim_out
self.gconf = global_conf
+ if self.gconf.use_cpu then
+ self.mat_type = self.gconf.mmat_type
+ else
+ self.mat_type = self.gconf.cumat_type
+ end
self:check_dim_len(#self.lambda, -1)
if #self.dim_in < 1 then
nerv.error("no input specified")
@@ -27,12 +32,12 @@ function CombinerLayer:init(batch_size)
nerv.error("mismatching dimensions of inputs/outputs")
end
end
- self.sum = self.gconf.cumat_type(batch_size, dim)
+ self.sum = self.mat_type(batch_size, dim)
end

function CombinerLayer:batch_resize(batch_size)
if self.sum:nrow() ~= batch_size then
- self.sum = self.gconf.cumat_type(batch_size, self.dim_in[1])
+ self.sum = self.mat_type(batch_size, self.dim_in[1])
end
end

@@ -45,13 +50,13 @@ function CombinerLayer:propagate(input, output)
output[1]:add(output[1], input[i], 1.0, self.lambda[i])
end
for i = 2, #self.dim_out do
- output[i]:copy_fromd(output[1])
+ output[i]:copy_from(output[1])
end
end

function CombinerLayer:back_propagate(bp_err, next_bp_err, input, output)
local sum = self.sum
- sum:copy_fromd(bp_err[1])
+ sum:copy_from(bp_err[1])
for i = 2, #self.dim_out do
sum:add(sum, bp_err[i], 1.0, 1.0)
end
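
Below the diff, a minimal sketch (not part of the patch) of the backend-selection pattern the first hunk introduces: pick the matrix constructor once from the global config, then allocate every buffer through it. The field names use_cpu, mmat_type, and cumat_type come from the diff above; the helper name select_mat_type and the surrounding usage are hypothetical.

-- Hypothetical helper, mirroring CombinerLayer:__init above: choose the
-- host (CPU) or CUDA matrix constructor once from the global config.
local function select_mat_type(gconf)
    if gconf.use_cpu then
        return gconf.mmat_type   -- CPU matrix constructor
    else
        return gconf.cumat_type  -- GPU (CUDA) matrix constructor
    end
end

-- Usage inside a layer, as the patched CombinerLayer does:
--   self.mat_type = select_mat_type(self.gconf)
--   self.sum = self.mat_type(batch_size, dim)
-- The later hunks swap copy_fromd for copy_from, presumably so the copy
-- call works with whichever matrix type was selected here.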