path: root/nerv/layer/rnn.lua
blob: 333be9e72389b891b741c08fa51847be532d1b46
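-- Recurrent (Elman-style) RNN layer, built as a nerv.GraphLayer: an affine
-- sub-layer combines the external input with the previous hidden state, an
-- activation sub-layer applies the nonlinearity, and a duplicate sub-layer
-- splits the result into the layer output and the recurrent feedback.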
local RNNLayer = nerv.class('nerv.RNNLayer', 'nerv.GraphLayer')

function RNNLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
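    -- exactly one input port and one output port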
    self:check_dim_len(1, 1)

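    -- default to a sigmoid nonlinearity when no activation layer is specified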
    if layer_conf.activation == nil then
        layer_conf.activation = 'nerv.SigmoidLayer'
    end

    local din = layer_conf.dim_in[1]
    local dout = layer_conf.dim_out[1]

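    -- reuse the parameter repo supplied by the caller, or create an empty one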
    local pr = layer_conf.pr
    if pr == nil then
        pr = nerv.ParamRepo({}, self.loc_type)
    end

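    -- internal sub-layers: 'main' is an affine transform with two inputs
    -- (the current input of size din and the previous hidden state of size
    -- dout), 'activation' applies the nonlinearity, and 'duplicate' fans the
    -- hidden state out into two copies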
    local layers = {
        ['nerv.AffineLayer'] = {
            main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
        },
        [layer_conf.activation] = {
            activation = {dim_in = {dout}, dim_out = {dout}},
        },
        ['nerv.DuplicateLayer'] = {
            duplicate = {dim_in = {dout}, dim_out = {dout, dout}},
        },
    }

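    -- internal connections; the third field is the time delay, so the edge
    -- from duplicate[1] back to main[2] carries the hidden state from the
    -- previous time step (the recurrence), while duplicate[2] provides the
    -- layer output at the current step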
    local connections = {
        {'<input>[1]', 'main[1]', 0},
        {'main[1]', 'activation[1]', 0},
        {'activation[1]', 'duplicate[1]', 0},
        {'duplicate[1]', 'main[2]', 1},
        {'duplicate[2]', '<output>[1]', 0},
    }

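    -- qualify sub-layer ids with this layer's id, instantiate them, and
    -- build the internal graph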
    self:add_prefix(layers, connections)
    local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
    self:graph_init(layer_repo, connections)
end