-- lua/config.lua: model configuration (global settings, layers, connections)
-- Hyperparameters and shared runtime objects; everything downstream
-- reads from this single table.
function get_global_conf()
    local global_conf = {
        lrate = 0.15,        -- learning rate
        wcost = 1e-5,        -- weight cost (L2 penalty)
        momentum = 0,
        clip = 5,            -- gradient clipping threshold
        cumat_type = nerv.CuMatrixFloat, -- device (GPU) matrix type
        mmat_type = nerv.MMatrixFloat,   -- host (CPU) matrix type
        vocab_size = 10000,
        nn_act_default = 0,
        hidden_size = 300,
        layer_num = 1,       -- number of stacked LSTM levels
        chunk_size = 15,     -- time steps unrolled per training chunk
        batch_size = 20,
        max_iter = 3,
        -- uniform parameter initializer over [-0.1, 0.1)
        param_random = function() return (math.random() / 5 - 0.1) end,
        dropout = 0.5,
        timer = nerv.Timer(),
        pr = nerv.ParamRepo(), -- parameter repository shared by all layers
    }
    return global_conf
end
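
-- Illustrative sketch (not part of the original config): a plain-Lua
-- check of the initializer above. math.random() returns a value in
-- [0, 1), so param_random() lands in [-0.1, 0.1). This helper is
-- inert unless called and assumes nothing beyond Lua's standard library.
local function check_param_random(conf, n)
    local lo, hi = math.huge, -math.huge
    for _ = 1, n do
        local v = conf.param_random()
        if v < lo then lo = v end
        if v > hi then hi = v end
    end
    return lo >= -0.1 and hi < 0.1
end
-- e.g. assert(check_param_random(get_global_conf(), 10000))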

-- Layer specs, keyed by NERV layer class name. The fixed layers
-- (embedding lookup, output projection, softmax + cross-entropy) are
-- declared inline; one LSTM/dropout/dup triple is generated per level.
function get_layers(global_conf)
    local pr = global_conf.pr
    local layers = {
        ['nerv.LSTMLayer'] = {},
        ['nerv.DropoutLayer'] = {},
        ['nerv.SelectLinearLayer'] = {
            -- word embedding lookup: token id -> hidden_size vector
            ['select'] = {dim_in = {1}, dim_out = {global_conf.hidden_size}, vocab = global_conf.vocab_size, pr = pr},
        },
        ['nerv.CombinerLayer'] = {},
        ['nerv.AffineLayer'] = {
            -- output projection back to vocabulary size
            output = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.vocab_size}, pr = pr}
        },
        ['nerv.SoftmaxCELayer'] = {
            -- softmax + cross-entropy loss; port 2 receives the labels
            softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}, compressed = true},
        },
    }
    for i = 1, global_conf.layer_num do
        -- LSTM takes {input, prev hidden, prev cell}, emits {hidden, cell}
        layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, pr = pr}
        layers['nerv.DropoutLayer']['dropout' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}}
        -- 'dup' fans the hidden state out to the recurrence and to dropout
        layers['nerv.CombinerLayer']['dup' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, lambda = {1}}
    end
    return layers
end
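
-- Illustrative helper (assumption: plain Lua only, no NERV calls):
-- flattens the spec above into a sorted list of layer ids, e.g.
-- {'dropout1', 'dup1', 'lstm1', 'output', 'select', 'softmax'} when
-- layer_num = 1. Handy for eyeballing what a given depth generates.
local function list_layer_ids(layers)
    local ids = {}
    for _, group in pairs(layers) do
        for id in pairs(group) do
            table.insert(ids, id)
        end
    end
    table.sort(ids)
    return ids
end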

-- Graph edges as {from_port, to_port, time_shift} triples; a shift of
-- 1 routes the value to the next time step (the recurrent links).
function get_connections(global_conf)
    local connections = {
        {'<input>[1]', 'select[1]', 0},
        {'select[1]', 'lstm1[1]', 0},
        {'dropout' .. global_conf.layer_num .. '[1]', 'output[1]', 0},
        {'output[1]', 'softmax[1]', 0},
        {'<input>[2]', 'softmax[2]', 0}, -- labels
        {'softmax[1]', '<output>[1]', 0},
    }
    for i = 1, global_conf.layer_num do
        table.insert(connections, {'lstm' .. i .. '[1]', 'dup' .. i .. '[1]', 0})
        -- cell state and hidden state feed back with a one-step delay
        table.insert(connections, {'lstm' .. i .. '[2]', 'lstm' .. i .. '[3]', 1})
        table.insert(connections, {'dup' .. i .. '[1]', 'lstm' .. i .. '[2]', 1})
        table.insert(connections, {'dup' .. i .. '[2]', 'dropout' .. i .. '[1]', 0})
        -- stacking: the previous level's dropped-out hidden state feeds
        -- the next LSTM's input port
        if i > 1 then
            table.insert(connections, {'dropout' .. (i - 1) .. '[1]', 'lstm' .. i .. '[1]', 0})
        end
    end
    return connections
end
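
-- Hedged end-to-end sketch: how a trainer might consume this file.
-- Only the three local calls below come from this config; the
-- commented nerv.LayerRepo line is an assumption about NERV's builder
-- API and may not match the actual trainer.
local function build_config()
    local gconf = get_global_conf()
    local layers = get_layers(gconf)
    local connections = get_connections(gconf)
    -- local repo = nerv.LayerRepo(layers, gconf.pr, gconf) -- assumed API
    return gconf, layers, connections
end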