author    Qi Liu <[email protected]>    2016-03-11 20:11:00 +0800
committer Qi Liu <[email protected]>    2016-03-11 20:11:00 +0800
commit    e2a9af061db485d4388902d738c9d8be3f94ab34 (patch)
tree      468d6c6afa0801f6a6bf794b3674f8814b8827f7 /lua/config.lua
parent    2f46a5e2b37a054f482f76f4ac3d26b144cf988f (diff)
add recipe and fix bugs
Diffstat (limited to 'lua/config.lua')
-rw-r--r--  lua/config.lua  67
1 file changed, 0 insertions(+), 67 deletions(-)
diff --git a/lua/config.lua b/lua/config.lua
deleted file mode 100644
index ff98ae0..0000000
--- a/lua/config.lua
+++ /dev/null
@@ -1,67 +0,0 @@
-function get_global_conf()
-    local global_conf = {
-        lrate = 0.15,            -- learning rate
-        wcost = 1e-5,            -- L2 weight cost (weight decay)
-        momentum = 0,
-        clip = 5,                -- gradient clipping threshold
-        cumat_type = nerv.CuMatrixFloat, -- CUDA (device) matrix type
-        mmat_type = nerv.MMatrixFloat,   -- main-memory (host) matrix type
-        vocab_size = 10000,
-        nn_act_default = 0,
-        hidden_size = 300,       -- LSTM hidden state width
-        layer_num = 1,           -- number of stacked LSTM layers
-        chunk_size = 15,         -- BPTT unroll length
-        batch_size = 20,
-        max_iter = 35,           -- maximum number of training iterations
-        param_random = function() return (math.random() / 5 - 0.1) end, -- uniform init in [-0.1, 0.1)
-        dropout_rate = 0.5,
-        timer = nerv.Timer(),
-        pr = nerv.ParamRepo(),   -- shared parameter repository
-    }
-    return global_conf
-end
-
-function get_layers(global_conf)
-    local pr = global_conf.pr
-    local layers = {
-        ['nerv.LSTMLayer'] = {},    -- filled in per layer below
-        ['nerv.DropoutLayer'] = {},
-        ['nerv.SelectLinearLayer'] = {
-            ['select'] = {dim_in = {1}, dim_out = {global_conf.hidden_size}, vocab = global_conf.vocab_size, pr = pr}, -- embedding lookup: word id -> hidden vector
-        },
-        ['nerv.CombinerLayer'] = {},
-        ['nerv.AffineLayer'] = {
-            output = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.vocab_size}, pr = pr} -- projection to vocabulary scores
-        },
-        ['nerv.SoftmaxCELayer'] = {
-            softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}, compressed = true},
-        },
-    }
-    for i = 1, global_conf.layer_num do
-        layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, pr = pr} -- in: {x, h(t-1), c(t-1)}; out: {h(t), c(t)}
-        layers['nerv.DropoutLayer']['dropout' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}}
-        layers['nerv.CombinerLayer']['dup' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, lambda = {1}} -- duplicates h(t) into two outputs
-    end
-    return layers
-end
-
-function get_connections(global_conf)
-    local connections = { -- each entry: {from_port, to_port, time_delay}
-        {'<input>[1]', 'select[1]', 0},
-        {'select[1]', 'lstm1[1]', 0},
-        {'dropout' .. global_conf.layer_num .. '[1]', 'output[1]', 0},
-        {'output[1]', 'softmax[1]', 0},
-        {'<input>[2]', 'softmax[2]', 0}, -- target word ids for the CE loss
-        {'softmax[1]', '<output>[1]', 0},
-    }
-    for i = 1, global_conf.layer_num do
-        table.insert(connections, {'lstm' .. i .. '[1]', 'dup' .. i .. '[1]', 0})
-        table.insert(connections, {'lstm' .. i .. '[2]', 'lstm' .. i .. '[3]', 1}) -- cell state fed back across one time step
-        table.insert(connections, {'dup' .. i .. '[1]', 'lstm' .. i .. '[2]', 1}) -- hidden state fed back across one time step
-        table.insert(connections, {'dup' .. i .. '[2]', 'dropout' .. i .. '[1]', 0})
-        if i > 1 then
-            table.insert(connections, {'dropout' .. (i - 1) .. '[1]', 'lstm' .. i .. '[1]', 0})
-        end
-    end
-    return connections
-end
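
The wiring above can be checked without Nerv itself. The following stand-alone sketch (plain Lua; the local conf table and the printing loop are illustrative additions, not part of the recipe) repeats the expansion loop from get_connections for an assumed two-layer configuration and prints every {from, to, delay} triple, making the recurrent delay-1 edges easy to spot:

local conf = { layer_num = 2 } -- hypothetical stand-in for get_global_conf()

local connections = {
    {'<input>[1]', 'select[1]', 0},
    {'select[1]', 'lstm1[1]', 0},
    {'dropout' .. conf.layer_num .. '[1]', 'output[1]', 0},
    {'output[1]', 'softmax[1]', 0},
    {'<input>[2]', 'softmax[2]', 0},
    {'softmax[1]', '<output>[1]', 0},
}
for i = 1, conf.layer_num do
    -- same per-layer wiring as get_connections() above
    table.insert(connections, {'lstm' .. i .. '[1]', 'dup' .. i .. '[1]', 0})
    table.insert(connections, {'lstm' .. i .. '[2]', 'lstm' .. i .. '[3]', 1})
    table.insert(connections, {'dup' .. i .. '[1]', 'lstm' .. i .. '[2]', 1})
    table.insert(connections, {'dup' .. i .. '[2]', 'dropout' .. i .. '[1]', 0})
    if i > 1 then
        table.insert(connections, {'dropout' .. (i - 1) .. '[1]', 'lstm' .. i .. '[1]', 0})
    end
end
for _, c in ipairs(connections) do
    print(string.format('%-14s -> %-14s (delay %d)', c[1], c[2], c[3]))
end

Only the edges printed with delay 1 carry state from one time step to the next; every other edge is evaluated within a single step, so stacking more layers only adds feed-forward dropout-to-lstm links between them.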