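-- SoftmaxLayer: an activation layer that applies the softmax function,
-- softmax(x)_i = exp(x_i) / sum_j exp(x_j), row-wise to its single input.
-- It holds no trainable parameters, so bind_params() and update() are no-ops.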
local SoftmaxLayer = nerv.class("nerv.SoftmaxLayer", "nerv.Layer")
function SoftmaxLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1) -- exactly one input and one output
end
function SoftmaxLayer:bind_params()
    -- nothing to bind: softmax has no trainable parameters
end
function SoftmaxLayer:init(batch_size)
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("mismatching dimensions of input and output")
    end
end
function SoftmaxLayer:batch_resize(batch_size)
    -- no batch-size-dependent state to resize
end
function SoftmaxLayer:update(bp_err, input, output)
    -- no params, therefore do nothing
end
function SoftmaxLayer:propagate(input, output)
    -- compute the softmax of each row of the input matrix into the output
    output[1]:softmax(input[1])
end
function SoftmaxLayer:back_propagate(bp_err, next_bp_err, input, output)
    -- standalone softmax back-propagation is not supported; for training,
    -- pair softmax with a cross-entropy criterion (e.g. SoftmaxCELayer)
    nerv.error_method_not_implemented()
end
function SoftmaxLayer:get_params()
    -- no parameters: return an empty parameter repository
    return nerv.ParamRepo({}, self.loc_type)
end
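
-- A minimal usage sketch (an assumption based on common NERV conventions,
-- not part of this file): `gconf`, `batch_size`, and the `input`/`output`
-- matrix tables are hypothetical stand-ins supplied by the caller.
--
--     local layer = nerv.SoftmaxLayer("softmax0", gconf,
--                                     {dim_in = {10}, dim_out = {10}})
--     layer:init(batch_size)
--     -- input[1] and output[1] are batch_size x 10 matrices
--     layer:propagate(input, output) -- output[1] now holds row-wise softmax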