-- BiasLayer: adds a trainable bias row vector to its single input stream.
local BiasLayer = nerv.class("nerv.BiasLayer", "nerv.Layer")

function BiasLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    -- exactly one input port and one output port
    self:check_dim_len(1, 1)
    self:bind_params()
end

function BiasLayer:bind_params()
    -- the bias parameter is a 1 x dim_out row vector
    self.bias = self:find_param("bias", self.lconf, self.gconf,
                                nerv.BiasParam,
                                {1, self.dim_out[1]})
end

function BiasLayer:init()
    if self.dim_in[1] ~= self.bias.trans:ncol() then
        nerv.error("mismatching dimensions of input and bias parameter")
    end
    if self.dim_out[1] ~= self.bias.trans:ncol() then
        nerv.error("mismatching dimensions of output and bias parameter")
    end
end

function BiasLayer:batch_resize(batch_size)
    -- do nothing
end

function BiasLayer:propagate(input, output)
    -- output = input + bias (the bias row is added to every row of the batch)
    output[1]:copy_from(input[1])
    output[1]:add_row(self.bias.trans, 1.0)
end

function BiasLayer:get_params()
    return nerv.ParamRepo({self.bias}, self.loc_type)
end
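
-- A minimal usage sketch (not part of this module): it relies only on the
-- constructor signature and the dim_in/dim_out/propagate conventions visible
-- above. The concrete contents of gconf and how find_param resolves the
-- "bias" parameter depend on the rest of the toolkit, so the values below
-- (layer id, dimensions) are hypothetical.
--
--     local gconf  = { ... }  -- global configuration table (assumed)
--     local blayer = nerv.BiasLayer("blayer1", gconf,
--                                   {dim_in = {429}, dim_out = {429}})
--     blayer:init()                    -- checks bias/port dimensions
--     blayer:propagate(input, output)  -- output[1] = input[1] + bias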