local DuplicateLayer = nerv.class('nerv.DuplicateLayer', 'nerv.Layer')

-- DuplicateLayer fans a single input out to an arbitrary number of outputs,
-- each of which must have the same dimension as the input.
function DuplicateLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, -1) -- exactly one input, any number of outputs
    if #self.dim_out < 1 then
        nerv.error('no output specified')
    end
    for i = 1, #self.dim_out do
        if self.dim_out[i] ~= self.dim_in[1] then
            nerv.error('mismatching dimensions of outputs')
        end
    end
end
-- no parameters or buffers to set up
function DuplicateLayer:init()
end

function DuplicateLayer:batch_resize()
end

function DuplicateLayer:propagate(input, output)
    -- do nothing: the outputs are reference copies of the input,
    -- handled by the network code in nn/network.lua
end
-- accumulate the error signals from all outputs into the single input
function DuplicateLayer:back_propagate(bp_err, next_bp_err)
    next_bp_err[1]:copy_from(bp_err[1])
    for i = 2, #self.dim_out do
        next_bp_err[1]:add(next_bp_err[1], bp_err[i], 1.0, 1.0)
    end
end

function DuplicateLayer:update()
    -- nothing to update: this layer has no trainable parameters
end

function DuplicateLayer:get_params()
    return nerv.ParamRepo({}, self.loc_type)
end
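
-- A minimal usage sketch (commented out; the surrounding network setup and
-- the gconf table are assumed, not part of this file): constructing the layer
-- directly with one 100-dim input duplicated to two outputs of the same size.
--
--   local dup = nerv.DuplicateLayer('dup', gconf,
--                                   {dim_in = {100}, dim_out = {100, 100}})
--   dup:init()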