Diffstat (limited to 'nerv/layer/rnn.lua')
-rw-r--r--  nerv/layer/rnn.lua  15  +++------------
1 file changed, 3 insertions(+), 12 deletions(-)
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index fd6e753..63e0b55 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -4,12 +4,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
     nerv.Layer.__init(self, id, global_conf, layer_conf)
     self:check_dim_len(-1, 1)
     if #self.dim_in == 0 then
-        nerv.error('RNN Layer %s has no input', self.id)
-    end
-
-    self.activation = layer_conf.activation
-    if self.activation == nil then
-        self.activation = 'nerv.SigmoidLayer'
+        nerv.error('RNN layer %s has no input', self.id)
     end
 
     local din = layer_conf.dim_in
@@ -22,10 +17,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
 
     local layers = {
         ['nerv.AffineLayer'] = {
-            main = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
-        },
-        [self.activation] = {
-            activation = {dim_in = {dout}, dim_out = {dout}},
+            main = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr, activation = layer_conf.activation},
         },
         ['nerv.DuplicateLayer'] = {
             duplicate = {dim_in = {dout}, dim_out = {dout, dout}},
@@ -33,8 +25,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
     }
 
     local connections = {
-        {'main[1]', 'activation[1]', 0},
-        {'activation[1]', 'duplicate[1]', 0},
+        {'main[1]', 'duplicate[1]', 0},
         {'duplicate[1]', 'main[1]', 1},
         {'duplicate[2]', '<output>[1]', 0},
     }
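
After this change the activation is no longer a separate node in the graph: the inner affine layer applies it itself, so the recurrence loop tightens from main -> activation -> duplicate -> main to main -> duplicate -> main, with the delayed edge {'duplicate[1]', 'main[1]', 1} feeding the previous hidden state back into the affine input. In effect the layer still computes h[t] = act(W * [h[t-1], x[t]] + b). A minimal construction sketch follows, assuming direct use of the __init signature shown above; the id 'rnn1', the variable gconf (standing for the usual global_conf), and the dimensions are illustrative assumptions, the string form of `activation` is borrowed from the old default 'nerv.SigmoidLayer', and this diff does not show whether the inner AffineLayer still falls back to a sigmoid when `activation` is omitted:

    -- hypothetical usage; id, gconf, and dimensions are made up for illustration
    local rnn = nerv.RNNLayer('rnn1', gconf, {
        dim_in = {429},                    -- external input width(s); any number of inputs is allowed
        dim_out = {2048},                  -- hidden (recurrent) width
        activation = 'nerv.SigmoidLayer', -- assumption: same string form as the old default
    })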