local SoftmaxLayer = nerv.class("nerv.SoftmaxLayer", "nerv.Layer")

function SoftmaxLayer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1) -- exactly one input and one output
end

function SoftmaxLayer:init(batch_size)
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("mismatching dimensions of input and output")
    end
end

function SoftmaxLayer:batch_resize(batch_size)
    -- do nothing
end

function SoftmaxLayer:update(bp_err, input, output)
    -- no params, therefore do nothing
end

function SoftmaxLayer:propagate(input, output)
    -- apply softmax row-wise:
    -- output[i][j] = exp(input[i][j]) / sum_k exp(input[i][k])
    output[1]:softmax(input[1])
end

function SoftmaxLayer:back_propagate(bp_err, next_bp_err, input, output)
    -- back-propagation through a standalone softmax is not supported here;
    -- in practice softmax is usually trained fused with a cross-entropy
    -- criterion, which gives a simpler combined gradient
    nerv.error_method_not_implemented()
end

function SoftmaxLayer:get_params()
    return nerv.ParamRepo({})
end