summaryrefslogtreecommitdiff
path: root/layer/softmax_ce.lua
blob: 37d286408a7a675b94413dd083a09ef45583ea74 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
-- Combined softmax + cross-entropy loss layer for the nerv toolkit.
-- Declares nerv.SoftmaxCELayer as a subclass of nerv.Layer; fusing the
-- two operations lets the backward pass use the simplified gradient
-- (softmax output - label) -- see back_propagate below.
local SoftmaxCELayer = nerv.class("nerv.SoftmaxCELayer", "nerv.Layer")

--- Construct a softmax cross-entropy layer.
-- Only stores its identity and the shared configuration; the loss
-- accumulators are set up later by init().
-- @param id unique identifier of this layer instance
-- @param global_conf shared global configuration table
function SoftmaxCELayer:__init(id, global_conf)
    self.gconf = global_conf
    self.id = id
end

--- Reset the accumulated loss statistics.
-- Zeroes the running cross-entropy total and the frame counter so a
-- fresh accumulation (e.g. a new epoch) starts from a clean state.
function SoftmaxCELayer:init()
    self.total_frames = 0
    self.total_ce = 0.0
end

--- Parameter-update hook required by the nerv.Layer interface.
-- This layer has no trainable parameters, so the update is a no-op.
function SoftmaxCELayer:update(bp_err, input, output)
    -- no params, therefore do nothing
end

--- Forward pass: softmax the input and accumulate cross-entropy loss.
-- input[0]: raw activations, one row per frame; input[1]: target
-- distribution of the same shape (presumably one-hot rows -- TODO
-- confirm against the data layer).  The softmax result is cached in
-- self.soutput because back_propagate needs it to form the gradient.
-- NOTE(review): log_elem on a softmax output that underflows to 0
-- would yield -inf here -- assumes the matrix backend clamps or the
-- inputs keep probabilities strictly positive; verify.
function SoftmaxCELayer:propagate(input, output)
    local soutput = input[0]:create()  -- temporary value for calc softmax
    self.soutput = soutput
    soutput:softmax(input[0])
    local ce = soutput:create()
    -- ce = log(softmax) .* label, elementwise; summing it gives the
    -- (negative) cross-entropy contribution of this batch
    ce:log_elem(soutput)
    ce:mul_elem(ce, input[1])
    -- rowsum() -> per-frame sum, colsum()[0] -> scalar over the batch;
    -- subtracted because CE = -sum(label .* log(softmax))
    self.total_ce = self.total_ce - ce:rowsum():colsum()[0]
    self.total_frames = self.total_frames + soutput:nrow()
end

--- Backward pass: write the loss gradient into next_bp_err[0].
-- For the fused softmax + cross-entropy pairing the gradient w.r.t.
-- the pre-softmax activations reduces to (softmax output - label),
-- which is why bp_err from the layer above is not consumed here.
function SoftmaxCELayer:back_propagate(next_bp_err, bp_err, input, output)
    local grad = next_bp_err[0]
    -- grad = 1.0 * soutput + (-1.0) * label
    grad:add(self.soutput, input[1], 1.0, -1.0)
end