-- Rectified linear unit (ReLU) activation layer: a parameter-free,
-- element-wise layer with one input port and one output port.
local ReluLayer = nerv.class('nerv.ReluLayer', 'nerv.Layer')

function ReluLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1) -- exactly one input and one output port
end

function ReluLayer:bind_params()
    -- no trainable parameters to bind
end

function ReluLayer:init()
    -- an element-wise activation cannot change the dimension
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error('mismatching dimensions of input and output')
    end
end

function ReluLayer:batch_resize(batch_size)
    -- no internal buffers depend on the batch size
end

function ReluLayer:update()
    -- nothing to update: the layer has no parameters
end

function ReluLayer:propagate(input, output)
    -- forward: output = max(0, input), element-wise
    output[1]:relu(input[1])
end

function ReluLayer:back_propagate(bp_err, next_bp_err, input, output)
    -- backward: pass the error through only where the output is positive
    next_bp_err[1]:relu_grad(bp_err[1], output[1])
end

function ReluLayer:get_params()
    return nerv.ParamRepo({}, self.loc_type) -- empty parameter repository
end
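
-- A minimal usage sketch (illustrative only; `gconf` and the dimension 256
-- are placeholders, not part of this file). The layer is constructed from a
-- layer_conf carrying matching dim_in/dim_out, and the forward/backward calls
-- operate on tables of matrices indexed by port:
--
--     local relu = nerv.ReluLayer('relu1', gconf,
--                                 {dim_in = {256}, dim_out = {256}})
--     relu:init()
--     relu:propagate(input, output)
--         -- output[1][i][j] = max(0, input[1][i][j])
--     relu:back_propagate(bp_err, next_bp_err, input, output)
--         -- next_bp_err[1] = bp_err[1] where output[1] > 0, else 0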