about summary refs log tree commit diff
path: root/nerv/layer/rnn.lua
diff options
context:
space:
mode:
Diffstat (limited to 'nerv/layer/rnn.lua')
-rw-r--r--  nerv/layer/rnn.lua  22
1 file changed, 14 insertions(+), 8 deletions(-)
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index 0b5ccaa..aad2b94 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -2,13 +2,17 @@ local RNNLayer = nerv.class('nerv.RNNLayer', 'nerv.GraphLayer')
function RNNLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
- self:check_dim_len(1, 1)
+ self:check_dim_len(-1, 1)
+ if #self.dim_in == 0 then
+ nerv.error('RNN Layer %s has no input', self.id)
+ end
- if layer_conf.activation == nil then
- layer_conf.activation = 'nerv.SigmoidLayer'
+ self.activation = layer_conf.activation
+ if self.activation == nil then
+ self.activation = 'nerv.SigmoidLayer'
end
- local din = layer_conf.dim_in[1]
+ local din = layer_conf.dim_in
local dout = layer_conf.dim_out[1]
local pr = layer_conf.pr
@@ -18,9 +22,9 @@ function RNNLayer:__init(id, global_conf, layer_conf)
local layers = {
['nerv.AffineLayer'] = {
- main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
+ main = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
},
- [layers.activation] = {
+ [self.activation] = {
activation = {dim_in = {dout}, dim_out = {dout}},
},
['nerv.DuplicateLayer'] = {
@@ -29,12 +33,14 @@ function RNNLayer:__init(id, global_conf, layer_conf)
}
local connections = {
- {'<input>[1]', 'main[1]', 0},
{'main[1]', 'activation[1]', 0},
{'activation[1]', 'duplicate[1]', 0},
- {'duplicate[1]', 'main[2]', 1},
+ {'duplicate[1]', 'main[1]', 1},
{'duplicate[2]', '<output>[1]', 0},
}
+ for i = 1, #din do
+ table.insert(connections, {'<input>[' .. i .. ']', 'main[' .. (i + 1) .. ']', 0})
+ end
self:add_prefix(layers, connections)
local layer_repo = nerv.LayerRepo(layers, pr, global_conf)