| author | Qi Liu <[email protected]> | 2016-03-11 18:18:59 +0800 |
|---|---|---|
| committer | Qi Liu <[email protected]> | 2016-03-11 18:18:59 +0800 |
| commit | 2f46a5e2b37a054f482f76f4ac3d26b144cf988f (patch) | |
| tree | e442e76741d664a29924c5f0ec6cc72e87345539 /lua/config.lua | |
| parent | 13729e83219cd90e33f329c49a50f6f4a4420721 (diff) | |
add lua
Diffstat (limited to 'lua/config.lua')
-rw-r--r-- | lua/config.lua | 67
1 file changed, 67 insertions(+), 0 deletions(-)
```diff
diff --git a/lua/config.lua b/lua/config.lua
new file mode 100644
index 0000000..ff98ae0
--- /dev/null
+++ b/lua/config.lua
@@ -0,0 +1,67 @@
+function get_global_conf()
+    local global_conf = {
+        lrate = 0.15,
+        wcost = 1e-5,
+        momentum = 0,
+        clip = 5,
+        cumat_type = nerv.CuMatrixFloat,
+        mmat_type = nerv.MMatrixFloat,
+        vocab_size = 10000,
+        nn_act_default = 0,
+        hidden_size = 300,
+        layer_num = 1,
+        chunk_size = 15,
+        batch_size = 20,
+        max_iter = 35,
+        param_random = function() return (math.random() / 5 - 0.1) end,
+        dropout_rate = 0.5,
+        timer = nerv.Timer(),
+        pr = nerv.ParamRepo(),
+    }
+    return global_conf
+end
+
+function get_layers(global_conf)
+    local pr = global_conf.pr
+    local layers = {
+        ['nerv.LSTMLayer'] = {},
+        ['nerv.DropoutLayer'] = {},
+        ['nerv.SelectLinearLayer'] = {
+            ['select'] = {dim_in = {1}, dim_out = {global_conf.hidden_size}, vocab = global_conf.vocab_size, pr = pr},
+        },
+        ['nerv.CombinerLayer'] = {},
+        ['nerv.AffineLayer'] = {
+            output = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.vocab_size}, pr = pr}
+        },
+        ['nerv.SoftmaxCELayer'] = {
+            softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}, compressed = true},
+        },
+    }
+    for i = 1, global_conf.layer_num do
+        layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, pr = pr}
+        layers['nerv.DropoutLayer']['dropout' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}}
+        layers['nerv.CombinerLayer']['dup' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, lambda = {1}}
+    end
+    return layers
+end
+
+function get_connections(global_conf)
+    local connections = {
+        {'<input>[1]', 'select[1]', 0},
+        {'select[1]', 'lstm1[1]', 0},
+        {'dropout' .. global_conf.layer_num .. '[1]', 'output[1]', 0},
+        {'output[1]', 'softmax[1]', 0},
+        {'<input>[2]', 'softmax[2]', 0},
+        {'softmax[1]', '<output>[1]', 0},
+    }
+    for i = 1, global_conf.layer_num do
+        table.insert(connections, {'lstm' .. i .. '[1]', 'dup' .. i .. '[1]', 0})
+        table.insert(connections, {'lstm' .. i .. '[2]', 'lstm' .. i .. '[3]', 1})
+        table.insert(connections, {'dup' .. i .. '[1]', 'lstm' .. i .. '[2]', 1})
+        table.insert(connections, {'dup' .. i .. '[2]', 'dropout' .. i .. '[1]', 0})
+        if i > 1 then
+            table.insert(connections, {'dropout' .. (i - 1) .. '[1]', 'lstm' .. i .. '[1]', 0})
+        end
+    end
+    return connections
+end
```
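The three functions above are the whole configuration surface of this model: `get_global_conf` fixes the hyperparameters, `get_layers` declares one LSTM/dropout/duplicator trio per layer plus the embedding lookup (`select`), the output projection (`output`), and the softmax, and `get_connections` wires them into a graph whose third tuple element reads as a time shift. As a quick way to see what graph the config generates, the sketch below (an illustration, not part of this commit) stubs the few `nerv` globals that `get_global_conf` touches so the file loads outside a NERV checkout, then expands the connection list for a hypothetical two-layer stack.

```lua
-- Standalone sketch: stub the nerv entries used by get_global_conf so
-- lua/config.lua can be loaded without the real NERV runtime. These are
-- placeholders only; real NERV provides actual matrix/timer/repo classes.
nerv = {
    CuMatrixFloat = 'CuMatrixFloat',
    MMatrixFloat  = 'MMatrixFloat',
    Timer         = function() return {} end,
    ParamRepo     = function() return {} end,
}
dofile('lua/config.lua')

local gconf = get_global_conf()
gconf.layer_num = 2   -- try a deeper stack than the default of 1

for _, c in ipairs(get_connections(gconf)) do
    -- each entry is {from_port, to_port, shift}; shift 1 marks an edge
    -- carried across one time step rather than within the current one
    print(string.format('%-14s -> %-14s (shift %d)', c[1], c[2], c[3]))
end
```

With `layer_num = 2` the loop emits the extra inter-layer edge `dropout1[1] -> lstm2[1]`, and the two shift-1 edges per layer (`lstm<i>[2] -> lstm<i>[3]` and `dup<i>[1] -> lstm<i>[2]`) appear to be the cell-state and hidden-state feedback that make the stack recurrent.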