-- Configuration of an LSTM language model for NERV: global hyper-parameters
-- (get_global_conf), layer definitions (get_layers) and their connections
-- (get_connections).
function get_global_conf()
    local global_conf = {
        lrate = 0.15,                    -- learning rate
        wcost = 1e-5,                    -- L2 weight decay
        momentum = 0,
        clip = 5,                        -- gradient/error clipping threshold
        cumat_type = nerv.CuMatrixFloat, -- device (CUDA) matrix type
        mmat_type = nerv.MMatrixFloat,   -- host (main memory) matrix type
        vocab_size = 10000,
        nn_act_default = 0,              -- default value for network activations
        hidden_size = 300,               -- embedding / LSTM hidden dimension
        layer_num = 1,                   -- number of stacked LSTM layers
        chunk_size = 15,                 -- BPTT unroll length (time steps per chunk)
        batch_size = 20,                 -- parallel sequences per mini-batch
        max_iter = 1,                    -- maximum number of training iterations
        param_random = function() return (math.random() / 5 - 0.1) end, -- uniform init in [-0.1, 0.1)
        dropout = 0.5,                   -- dropout rate
        timer = nerv.Timer(),            -- timing utility
        pr = nerv.ParamRepo(),           -- shared parameter repository
    }
    return global_conf
end
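
-- Layer specification. The table returned by get_layers() maps a NERV layer
-- class name to a set of named layer instances, each configured with its port
-- dimensions (dim_in / dim_out) and, where needed, the shared ParamRepo.
-- With the default layer_num = 1, and as wired up in get_connections() below,
-- this describes a single-layer LSTM language model:
--   select (embedding lookup) -> lstm1 -> dup1 (duplicates the hidden state
--   for the recurrent feedback) -> dropout1 -> output (affine projection to
--   the vocabulary) -> softmax (softmax + cross-entropy against the reference
--   word).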
function get_layers(global_conf)
    local pr = global_conf.pr
    local layers = {
        ['nerv.LSTMLayer'] = {},      -- filled per recurrent layer below
        ['nerv.DropoutLayer'] = {},   -- filled per recurrent layer below
        ['nerv.SelectLinearLayer'] = {
            -- embedding lookup: word index -> hidden_size vector
            ['select'] = {dim_in = {1}, dim_out = {global_conf.hidden_size}, vocab = global_conf.vocab_size, pr = pr},
        },
        ['nerv.CombinerLayer'] = {},  -- filled per recurrent layer below
        ['nerv.AffineLayer'] = {
            -- output projection: hidden_size -> vocab_size
            output = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.vocab_size}, pr = pr}
        },
        ['nerv.SoftmaxCELayer'] = {
            -- softmax + cross-entropy; takes the prediction and the reference label
            softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}},
        },
    }
    for i = 1, global_conf.layer_num do
        -- LSTM: three inputs (current input plus the recurrent hidden and cell
        -- states), two outputs (new hidden and cell states)
        layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, pr = pr}
        layers['nerv.DropoutLayer']['dropout' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}}
        -- combiner duplicates the hidden state (lambda = {1}): one copy for the
        -- recurrent feedback, one copy for the layer above
        layers['nerv.CombinerLayer']['dup' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, lambda = {1}}
    end
    return layers
end
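
-- Connection specification. Each entry returned by get_connections() is a
-- triple {from_port, to_port, time_shift}: ports are written as
-- 'layer_name[port_index]', '<input>[i]' / '<output>[i]' are the external
-- ports of the whole network, and a non-zero time_shift delays the edge by
-- that many time steps (this is how recurrent connections are expressed).
-- A rough usage sketch, assuming a driver script that loads this file:
--
--     local gconf = get_global_conf()
--     local layers = get_layers(gconf)
--     local connections = get_connections(gconf)
--     -- hand layers and connections to the NERV network builder in use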
function get_connections(global_conf)
    local connections = {
        {'<input>[1]', 'select[1]', 0},
        {'select[1]', 'lstm1[1]', 0},
        {'dropout' .. global_conf.layer_num .. '[1]', 'output[1]', 0},
        {'output[1]', 'softmax[1]', 0},
        {'<input>[2]', 'softmax[2]', 0},
        {'softmax[1]', '