about summary refs log tree commit diff
path: root/nerv/examples/network_debug/config.lua
diff options
context:
space:
mode:
author Determinant <ted.sybil@gmail.com> 2016-03-15 15:46:05 +0800
committer Determinant <ted.sybil@gmail.com> 2016-03-15 15:46:05 +0800
commit 07fc1e2794027d44c255e1062c4491346b101a08 (patch)
tree 8e7217b9c5e9570b94af5aaad3f94d1a37cfe40b /nerv/examples/network_debug/config.lua
parent a5a4d2735b595fc9fadc9c7e91198786d3c0e078 (diff)
parent e15307f071813e2eb56f7f83229b91141961325a (diff)
Merge branch 'master' of github.com:liuq901/nerv into liuq901-master
Diffstat (limited to 'nerv/examples/network_debug/config.lua')
-rw-r--r-- nerv/examples/network_debug/config.lua | 10
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/nerv/examples/network_debug/config.lua b/nerv/examples/network_debug/config.lua
index e20d5a9..0429e9a 100644
--- a/nerv/examples/network_debug/config.lua
+++ b/nerv/examples/network_debug/config.lua
@@ -35,6 +35,10 @@ function get_layers(global_conf)
['nerv.SoftmaxCELayer'] = {
softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}, compressed = true},
},
+ ['nerv.DuplicateLayer'] = {
+ dup1 = {dim_in = {1}, dim_out = {1}},
+ dup2 = {dim_in = {1}, dim_out = {1}},
+ },
}
for i = 1, global_conf.layer_num do
layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}, pr = pr}
@@ -45,12 +49,14 @@ end
function get_connections(global_conf)
local connections = {
- {'<input>[1]', 'select[1]', 0},
+ {'<input>[1]', 'dup1[1]', 0},
+ {'dup1[1]', 'select[1]', 0},
{'select[1]', 'lstm1[1]', 0},
{'dropout' .. global_conf.layer_num .. '[1]', 'output[1]', 0},
{'output[1]', 'softmax[1]', 0},
{'<input>[2]', 'softmax[2]', 0},
- {'softmax[1]', '<output>[1]', 0},
+ {'softmax[1]', 'dup2[1]', 0},
+ {'dup2[1]', '<output>[1]', 0},
}
for i = 1, global_conf.layer_num do
table.insert(connections, {'lstm' .. i .. '[1]', 'dropout' .. i .. '[1]', 0})