author    | Qi Liu <[email protected]> | 2016-03-09 11:58:13 +0800
committer | Qi Liu <[email protected]> | 2016-03-09 11:58:13 +0800
commit    | 05fcde5bf0caa1ceb70fef02fc88eda6f00c5ed5 (patch)
tree      | a3bfb245d3f106525ec2ff4f987848fcd3f56217 /lua/config.lua
parent    | 4e56b863203ab6919192efe973ba9f8ee0d5ac65 (diff)
add recipe
Diffstat (limited to 'lua/config.lua')
-rw-r--r-- | lua/config.lua | 67
1 file changed, 67 insertions, 0 deletions
```diff
diff --git a/lua/config.lua b/lua/config.lua
new file mode 100644
index 0000000..9d73b64
--- /dev/null
+++ b/lua/config.lua
@@ -0,0 +1,67 @@
+function get_global_conf()
+    local global_conf = {
+        lrate = 0.15,
+        wcost = 1e-5,
+        momentum = 0,
+        clip = 5,
+        cumat_type = nerv.CuMatrixFloat,
+        mmat_type = nerv.MMatrixFloat,
+        vocab_size = 10000,
+        nn_act_default = 0,
+        hidden_size = 300,
+        layer_num = 1,
+        chunk_size = 15,
+        batch_size = 20,
+        max_iter = 1,
+        param_random = function() return (math.random() / 5 - 0.1) end,
+        dropout = 0.5,
+        timer = nerv.Timer(),
+        pr = nerv.ParamRepo(),
+    }
+    return global_conf
+end
+
+function get_layers(global_conf)
+    local pr = global_conf.pr
+    local layers = {
+        ['nerv.LSTMLayer'] = {},
+        ['nerv.DropoutLayer'] = {},
+        ['nerv.SelectLinearLayer'] = {
+            ['select'] = {dim_in = {1}, dim_out = {global_conf.hidden_size}, vocab = global_conf.vocab_size, pr = pr},
+        },
+        ['nerv.CombinerLayer'] = {},
+        ['nerv.AffineLayer'] = {
+            output = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.vocab_size}, pr = pr}
+        },
+        ['nerv.SoftmaxCELayer'] = {
+            softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}},
+        },
+    }
+    for i = 1, global_conf.layer_num do
+        layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, pr = pr}
+        layers['nerv.DropoutLayer']['dropout' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}}
+        layers['nerv.CombinerLayer']['dup' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, lambda = {1}}
+    end
+    return layers
+end
+
+function get_connections(global_conf)
+    local connections = {
+        {'<input>[1]', 'select[1]', 0},
+        {'select[1]', 'lstm1[1]', 0},
+        {'dropout' .. global_conf.layer_num .. '[1]', 'output[1]', 0},
+        {'output[1]', 'softmax[1]', 0},
+        {'<input>[2]', 'softmax[2]', 0},
+        {'softmax[1]', '<output>[1]', 0},
+    }
+    for i = 1, global_conf.layer_num do
+        table.insert(connections, {'lstm' .. i .. '[1]', 'dup' .. i .. '[1]', 0})
+        table.insert(connections, {'lstm' .. i .. '[2]', 'lstm' .. i .. '[3]', 1})
+        table.insert(connections, {'dup' .. i .. '[1]', 'lstm' .. i .. '[2]', 1})
+        table.insert(connections, {'dup' .. i .. '[2]', 'dropout' .. i .. '[1]', 0})
+        if i > 1 then
+            table.insert(connections, {'dropout' .. (i - 1) .. '[1]', 'lstm' .. i .. '[1]', 0})
+        end
+    end
+    return connections
+end
```
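Each connection triple is {source_port, sink_port, time_shift}: a shift of 0 is a feed-forward edge within the current time step, while a shift of 1 feeds a value back into the next time step, which is how the recurrence is wired through the `dup` combiner. For intuition, here is what the loop in `get_connections` appends when traced with `global_conf.layer_num = 2`; the committed config uses `layer_num = 1`, so this expansion is illustrative only, and the port semantics in the comments are inferred from the wiring rather than stated in the commit:

```lua
-- Illustrative trace: edges appended by get_connections' loop
-- for layer_num = 2. Format: {source_port, sink_port, time_shift}.
{'lstm1[1]', 'dup1[1]', 0},      -- hidden output -> duplicator
{'lstm1[2]', 'lstm1[3]', 1},     -- cell state -> cell input at t+1
{'dup1[1]', 'lstm1[2]', 1},      -- hidden state -> hidden input at t+1
{'dup1[2]', 'dropout1[1]', 0},   -- hidden state -> dropout, toward next layer
{'lstm2[1]', 'dup2[1]', 0},
{'lstm2[2]', 'lstm2[3]', 1},
{'dup2[1]', 'lstm2[2]', 1},
{'dup2[2]', 'dropout2[1]', 0},
{'dropout1[1]', 'lstm2[1]', 0},  -- i > 1: stack layer 2 on layer 1's output
```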
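A recipe in this shape is consumed by a driver script that calls the three functions and hands the resulting tables to NERV's graph and training machinery. As a minimal sketch of that shape, assuming a NERV interpreter session where the `nerv` module is loaded (required, since `get_global_conf` references `nerv.CuMatrixFloat`, `nerv.Timer`, and `nerv.ParamRepo`); the `dofile` path and all of the inspection code below are illustrative assumptions, not part of this commit:

```lua
-- Minimal sketch, assuming the NERV interpreter; inspection code only.
dofile('lua/config.lua')

local gconf = get_global_conf()
local layers = get_layers(gconf)
local conns = get_connections(gconf)

-- Layer spec: layer type -> {layer name -> constructor options}.
for ltype, insts in pairs(layers) do
    for name, _ in pairs(insts) do
        print(string.format('%s: %s', ltype, name))
    end
end

-- Connections: {source_port, sink_port, time_shift}.
for _, c in ipairs(conns) do
    print(string.format('%s -> %s (shift %d)', c[1], c[2], c[3]))
end
```

In the toolkit proper these tables would be passed to a layer repository and graph builder rather than printed, but that API is not part of this commit.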