-- nerv/layer/lstm_gate.lua
local LSTMGateLayer = nerv.class('nerv.LSTMGateLayer', 'nerv.Layer')
-- NOTE: this is a full matrix gate
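-- layer_conf.param_type is a per-input list of markers; an entry of 'D' keeps
-- the corresponding linear transform diagonal (it is re-diagonalized after
-- binding and after every update)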

function LSTMGateLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self.param_type = layer_conf.param_type
    self:check_dim_len(-1, 1) -- accept any number of inputs, exactly one output
    self:bind_params()
end

function LSTMGateLayer:bind_params()
    local lconf = self.lconf
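    -- accept the older flag name no_update_ltp as an alias for no_update_ltp1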
    lconf.no_update_ltp1 = lconf.no_update_ltp1 or lconf.no_update_ltp
    for i = 1, #self.dim_in do
        local pid = "ltp" .. i
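        -- for the first input, the parameter may also be looked up under the plain name "ltp"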
        local pid_list = i == 1 and {pid, "ltp"} or pid
        self["ltp" .. i] = self:find_param(pid_list, lconf, self.gconf,
                                            nerv.LinearTransParam,
                                            {self.dim_in[i], self.dim_out[1]})
        if self.param_type[i] == 'D' then
            self["ltp" .. i].trans:diagonalize()
        end
        local no_update = lconf["no_update_ltp" .. i]
        if (no_update ~= nil) and no_update or lconf.no_update_all then
            self["ltp" .. i].no_update = true
        end
    end
    self.ltp = self.ltp1 -- alias of ltp1
    self.bp = self:find_param("bp", lconf, self.gconf,
                                nerv.BiasParam, {1, self.dim_out[1]},
                                nerv.Param.gen_zero)
    local no_update = lconf["no_update_bp"]
    if (no_update ~= nil) and no_update or lconf.no_update_all then
        self.bp.no_update = true
    end
end

function LSTMGateLayer:init(batch_size)
    if self.dim_out[1] ~= self.bp.trans:ncol() then
        nerv.error("mismatching dimensions of linear transform and bias parameter")
    end
    for i = 1, #self.dim_in do
        if self.dim_in[i] ~= self["ltp" .. i].trans:nrow() then
            nerv.error("mismatching dimensions of linear transform parameter and input")
        end
        if self.dim_out[1] ~= self["ltp" .. i].trans:ncol() then
            nerv.error("mismatching dimensions of linear transform parameter and output")
        end
        self["ltp" .. i]:train_init()
    end
    self.bp:train_init()
    self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
end

function LSTMGateLayer:batch_resize(batch_size)
    if self.err_bakm:nrow() ~= batch_size then
        self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
    end
end

function LSTMGateLayer:propagate(input, output)
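    -- y = sigmoid(x_1 * W_1 + x_2 * W_2 + ... + b), accumulated in place in output[1]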
    -- apply linear transform
    output[1]:mul(input[1], self.ltp1.trans, 1.0, 0.0, 'N', 'N')
    for i = 2, #self.dim_in do
        output[1]:mul(input[i], self["ltp" .. i].trans, 1.0, 1.0, 'N', 'N')
    end
    -- add bias
    output[1]:add_row(self.bp.trans, 1.0)
    output[1]:sigmoid(output[1])
end

function LSTMGateLayer:back_propagate(bp_err, next_bp_err, input, output)
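    -- push the error through the sigmoid first; err_bakm then holds the gradient
    -- w.r.t. the pre-activation and is propagated back through each linear transform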
    self.err_bakm:sigmoid_grad(bp_err[1], output[1])
    for i = 1, #self.dim_in do
        next_bp_err[i]:mul(self.err_bakm, self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T')
        self["ltp" .. i]:back_propagate_by_err_input(self.err_bakm, input[i])
    end
    self.bp:back_propagate_by_gradient(self.err_bakm:colsum())
end

function LSTMGateLayer:update()
    for i = 1, #self.dim_in do
        self["ltp" .. i]:update_by_err_input()
        if self.param_type[i] == 'D' then
            self["ltp" .. i].trans:diagonalize()
        end
    end
    self.bp:update_by_gradient()
end

function LSTMGateLayer:get_params()
    local pr = nerv.ParamRepo({self.bp}, self.loc_type)
    for i = 1, #self.dim_in do
        pr:add(self["ltp" .. i])
    end
    return pr
end
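
--[[
Usage sketch (illustrative only): the constructor takes (id, global_conf,
layer_conf) as above; the dim_in/dim_out/pr fields follow the usual nerv.Layer
conventions, and the concrete dimensions, the 'N' placeholder for non-diagonal
transforms and the parameter repo below are assumptions, not verified values.

    local gate = nerv.LSTMGateLayer("forget_gate", gconf,
        {
            dim_in = {dim_x, dim_h, dim_c},  -- e.g. input, recurrent and cell streams
            dim_out = {dim_c},
            param_type = {'N', 'N', 'D'},    -- only 'D' is special: it keeps that transform diagonal
            pr = param_repo,                 -- optional repo in which ltp1..ltp3 / bp are looked up
        })
--]]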