path: root/nerv/layer/dropout.lua

local DropoutLayer = nerv.class("nerv.DropoutLayer", "nerv.Layer")

function DropoutLayer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    if self.gconf.use_cpu then
        self.mat_type = self.gconf.mmat_type
    else
        self.mat_type = self.gconf.cumat_type
    end
    self.rate = layer_conf.dropout_rate or global_conf.dropout_rate
    if self.rate == nil then
        nerv.warning("[DropoutLayer:__init] dropout rate is not set")
    end
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1) -- exactly one input and one output
end
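
-- Note the precedence above: a per-layer dropout_rate in layer_conf
-- overrides the global one from global_conf. A hypothetical layer_conf
-- might look like this (the dimension value is illustrative only):
--
--   {dim_in = {429}, dim_out = {429}, dropout_rate = 0.5}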

-- allocate one dropout mask per timestep in the chunk (chunk_size defaults
-- to 1 for the non-chunked case)
function DropoutLayer:init(batch_size, chunk_size)
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("mismatching dimensions of input and output")
    end
    if chunk_size == nil then
        chunk_size = 1
    end
    self.mask = {}
    for t = 1, chunk_size do
        self.mask[t] = self.mat_type(batch_size, self.dim_in[1])
    end
end

-- reallocate the masks only when the batch size actually changes
function DropoutLayer:batch_resize(batch_size, chunk_size)
    if chunk_size == nil then
        chunk_size = 1
    end
    for t = 1, chunk_size do
        if self.mask[t] == nil or self.mask[t]:nrow() ~= batch_size then
            self.mask[t] = self.mat_type(batch_size, self.dim_in[1])
        end
    end
end

function DropoutLayer:propagate(input, output, t)
    if t == nil then
        t = 1
    end
    if self.rate then
        self.mask[t]:rand_uniform()
        -- since we will lose a portion of the activations, we multiply the
        -- surviving activations by 1 / (1 - rate) to compensate
        self.mask[t]:thres_mask(self.mask[t], self.rate,
                                0, 1 / (1.0 - self.rate))
        output[1]:mul_elem(input[1], self.mask[t])
    else
        output[1]:copy_fromd(input[1])
    end
end
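
-- Why the 1 / (1 - rate) scaling preserves the expected activation: each
-- mask entry is 0 with probability `rate` and 1 / (1 - rate) otherwise, so
--
--   E[mask * x] = rate * 0 + (1 - rate) * (1 / (1 - rate)) * x = x
--
-- e.g. with rate = 0.5, the surviving half of the activations is doubled.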

function DropoutLayer:update(bp_err, input, output, t)
    -- no params, therefore do nothing
end

function DropoutLayer:back_propagate(bp_err, next_bp_err, input, output, t)
    if t == nil then
        t = 1
    end
    if self.rate then
        next_bp_err[1]:mul_elem(bp_err[1], self.mask[t])
    else
        next_bp_err[1]:copy_fromd(bp_err[1])
    end
end
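
-- Note that the backward pass reuses the exact mask (including its built-in
-- 1 / (1 - rate) scaling) that propagate drew at timestep t: since
-- output = mask .* input elementwise, d(loss)/d(input) = mask .* bp_err.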

function DropoutLayer:get_params()
    return nerv.ParamRepo({})
end
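
-- A minimal usage sketch, assuming a CPU matrix type named
-- nerv.MMatrixFloat (the exact constructor name may differ in a given NERV
-- build); the gconf fields follow the reads in __init above:
--
--   local gconf = {use_cpu = true,
--                  mmat_type = nerv.MMatrixFloat,
--                  dropout_rate = 0.2}
--   local layer = nerv.DropoutLayer("drop1", gconf,
--                                   {dim_in = {4}, dim_out = {4}})
--   layer:init(8)                  -- batch_size = 8, chunk_size defaults to 1
--   local input = {gconf.mmat_type(8, 4)}
--   local output = {gconf.mmat_type(8, 4)}
--   layer:propagate(input, output) -- t defaults to 1; ~20% of entries zeroed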