-- nerv/layer/softmax.lua
local SoftmaxLayer = nerv.class("nerv.SoftmaxLayer", "nerv.Layer")

function SoftmaxLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1) -- exactly one input and one output
end

function SoftmaxLayer:bind_params()
    -- do nothing
end

function SoftmaxLayer:init(batch_size)
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("mismatching dimensions of input and output")
    end
end

function SoftmaxLayer:batch_resize(batch_size)
    -- do nothing
end

function SoftmaxLayer:update(bp_err, input, output)
    -- no params, therefore do nothing
end

function SoftmaxLayer:propagate(input, output)
    -- row-wise softmax: each row of the batch is exponentiated and
    -- normalized independently so that it sums to one
    output[1]:softmax(input[1])
end
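-- For intuition (a concrete evaluation, not part of the layer): a single
-- input row {1, 2, 3} maps to roughly {0.090, 0.245, 0.665} above, and the
-- entries of every output row sum to 1.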

function SoftmaxLayer:back_propagate(bp_err, next_bp_err, input, output)
    -- with y = softmax(x) and e = dL/dy, the softmax Jacobian gives
    --   dL/dx = y .* (e - rowsum(e .* y))
    -- computed here row by row without forming the Jacobian explicitly
    local nbe = next_bp_err[1]
    nbe:mul_elem(bp_err[1], output[1]) -- nbe = e .* y
    local offset = nbe:rowsum()        -- offset = per-row sum of e .* y
    nbe:copy_from(bp_err[1])           -- nbe = e
    nbe:add_row(offset, -1.0)          -- nbe = e - offset, broadcast per row
    nbe:mul_elem(nbe, output[1])       -- nbe = y .* (e - offset)
end
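-- Worked example for one row (illustration only): if y = {0.25, 0.75} and
-- e = {1, 0}, then offset = 1*0.25 + 0*0.75 = 0.25 and
-- dL/dx = y .* (e - offset) = {0.1875, -0.1875}; the entries sum to zero,
-- as any gradient passed back through a softmax must.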

function SoftmaxLayer:get_params()
    -- softmax has no trainable parameters, so return an empty repo
    return nerv.ParamRepo({}, self.loc_type)
end
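
-- A minimal usage sketch (hypothetical configuration; the exact fields of
-- the global conf and the matrix class depend on the surrounding nerv setup):
--
--   local layer = nerv.SoftmaxLayer("softmax1", gconf,
--                                   {dim_in = {3}, dim_out = {3}})
--   layer:init(batch_size)
--   layer:propagate({x}, {y})               -- x, y: batch_size x 3 matrices
--   layer:back_propagate({dy}, {dx}, {x}, {y})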