author | Qi Liu <[email protected]> | 2016-03-11 13:32:00 +0800
---|---|---
committer | Qi Liu <[email protected]> | 2016-03-11 13:32:00 +0800
commit | f26288ba61d3d16866e1b227a71e7d9c46923436 (patch) |
tree | ea41bb08994d9d2ee59c3ac5f3ec2c41bcaac6d2 /lua/config.lua |
parent | 05fcde5bf0caa1ceb70fef02fc88eda6f00c5ed5 (diff) |
update mini_batch_init
Diffstat (limited to 'lua/config.lua')
-rw-r--r-- | lua/config.lua | 4 |
1 file changed, 2 insertions, 2 deletions
```diff
diff --git a/lua/config.lua b/lua/config.lua
index 9d73b64..1ec1198 100644
--- a/lua/config.lua
+++ b/lua/config.lua
@@ -12,7 +12,7 @@ function get_global_conf()
         layer_num = 1,
         chunk_size = 15,
         batch_size = 20,
-        max_iter = 1,
+        max_iter = 3,
         param_random = function() return (math.random() / 5 - 0.1) end,
         dropout = 0.5,
         timer = nerv.Timer(),
@@ -34,7 +34,7 @@ function get_layers(global_conf)
         output = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.vocab_size}, pr = pr}
     },
     ['nerv.SoftmaxCELayer'] = {
-        softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}},
+        softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}, compressed = true},
     },
 }
 for i = 1, global_conf.layer_num do
```
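For readability, here is a minimal sketch of how the two touched regions of `lua/config.lua` read after this commit. It is reconstructed only from the hunks above and needs the nerv Lua runtime to run; the `return` wrapping, the `hidden_size`/`vocab_size` values, and the omission of the other layer entries are assumptions for illustration, not part of the commit.

```lua
-- Sketch of the post-commit state of the two hunks in lua/config.lua.
-- Only the fields visible in the diff come from the commit; hidden_size
-- and vocab_size are hypothetical placeholders so the snippet is self-contained.
function get_global_conf()
    return {
        layer_num = 1,
        chunk_size = 15,
        batch_size = 20,
        max_iter = 3,          -- raised from 1 to 3 by this commit
        param_random = function() return (math.random() / 5 - 0.1) end,
        dropout = 0.5,
        timer = nerv.Timer(),  -- provided by the nerv runtime
        hidden_size = 300,     -- placeholder, not shown in the diff
        vocab_size = 10000,    -- placeholder, not shown in the diff
    }
end

function get_layers(global_conf)
    return {
        -- earlier layer entries from the file are omitted here
        ['nerv.SoftmaxCELayer'] = {
            -- this commit adds compressed = true to the softmax layer spec
            softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size},
                       dim_out = {1}, compressed = true},
        },
    }
end
```

In short, the commit raises the training iteration count (`max_iter`) from 1 to 3 and turns on the `compressed` option of `nerv.SoftmaxCELayer` in this example configuration.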