about summary refs log tree commit diff
path: root/nerv/layer/rnn.lua
diff options
context:
space:
mode:
Diffstat (limited to 'nerv/layer/rnn.lua')
-rw-r--r--  nerv/layer/rnn.lua  20
1 file changed, 12 insertions(+), 8 deletions(-)
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index e59cf5b..0b5ccaa 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -4,6 +4,10 @@ function RNNLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
self:check_dim_len(1, 1)
+ if layer_conf.activation == nil then
+ layer_conf.activation = 'nerv.SigmoidLayer'
+ end
+
local din = layer_conf.dim_in[1]
local dout = layer_conf.dim_out[1]
@@ -16,20 +20,20 @@ function RNNLayer:__init(id, global_conf, layer_conf)
['nerv.AffineLayer'] = {
main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
},
- ['nerv.SigmoidLayer'] = {
- sigmoid = {dim_in = {dout}, dim_out = {dout}},
+ [layers.activation] = {
+ activation = {dim_in = {dout}, dim_out = {dout}},
},
['nerv.DuplicateLayer'] = {
- dup = {dim_in = {dout}, dim_out = {dout, dout}},
- }
+ duplicate = {dim_in = {dout}, dim_out = {dout, dout}},
+ },
}
local connections = {
{'<input>[1]', 'main[1]', 0},
- {'main[1]', 'sigmoid[1]', 0},
- {'sigmoid[1]', 'dup[1]', 0},
- {'dup[1]', 'main[2]', 1},
- {'dup[2]', '<output>[1]', 0},
+ {'main[1]', 'activation[1]', 0},
+ {'activation[1]', 'duplicate[1]', 0},
+ {'duplicate[1]', 'main[2]', 1},
+ {'duplicate[2]', '<output>[1]', 0},
}
self:add_prefix(layers, connections)