aboutsummaryrefslogtreecommitdiff
path: root/nerv/layer/rnn.lua
blob: fd6e753f91d1c27adfd966841dfe127fca4e7e8c (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
local RNNLayer = nerv.class('nerv.RNNLayer', 'nerv.GraphLayer')

--- Simple recurrent layer assembled as a sub-graph:
-- an affine transform whose first input port carries the previous
-- hidden state, followed by an activation and a duplicate layer that
-- both feeds the state back into the affine transform and exposes it
-- as the layer's single output.
-- @param id layer identifier
-- @param global_conf global configuration table
-- @param layer_conf layer configuration; uses dim_in/dim_out, plus the
--   optional keys `activation` (layer class name, defaults to
--   'nerv.SigmoidLayer') and `pr` (parameter repo, defaults to empty)
function RNNLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    -- any number of inputs is allowed, but exactly one output
    self:check_dim_len(-1, 1)
    if #self.dim_in == 0 then
        nerv.error('RNN Layer %s has no input', self.id)
    end

    -- activation class defaults to sigmoid when unspecified
    -- (explicit nil check rather than `or`, preserving any non-nil value)
    local act = layer_conf.activation
    if act == nil then
        act = 'nerv.SigmoidLayer'
    end
    self.activation = act

    local input_dims = layer_conf.dim_in
    local hidden_dim = layer_conf.dim_out[1]

    -- fall back to an empty parameter repo when none is supplied
    local param_repo = layer_conf.pr
    if param_repo == nil then
        param_repo = nerv.ParamRepo({}, self.loc_type)
    end

    -- sub-layer specs: the affine's port 1 is the recurrent state,
    -- ports 2..n+1 are the external inputs (table.connect is nerv's
    -- list-concatenation extension)
    local sublayers = {
        ['nerv.AffineLayer'] = {
            main = {dim_in = table.connect({hidden_dim}, input_dims),
                    dim_out = {hidden_dim}, pr = param_repo},
        },
        [act] = {
            activation = {dim_in = {hidden_dim}, dim_out = {hidden_dim}},
        },
        ['nerv.DuplicateLayer'] = {
            duplicate = {dim_in = {hidden_dim},
                         dim_out = {hidden_dim, hidden_dim}},
        },
    }

    -- wiring; the third field is presumably the time-step delay
    -- (nerv graph convention — only the feedback edge uses 1)
    local conns = {
        {'main[1]', 'activation[1]', 0},
        {'activation[1]', 'duplicate[1]', 0},
        {'duplicate[1]', 'main[1]', 1},
        {'duplicate[2]', '<output>[1]', 0},
    }
    -- route external input i to affine port i + 1 (port 1 is the state)
    for i = 1, #input_dims do
        conns[#conns + 1] = {string.format('<input>[%d]', i),
                             string.format('main[%d]', i + 1), 0}
    end

    self:add_prefix(sublayers, conns)
    local repo = nerv.LayerRepo(sublayers, param_repo, global_conf)
    self.lrepo = repo
    self:graph_init(repo, conns)
end