Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/graph.lua | 10 ++++++++++
-rw-r--r--  nerv/layer/rnn.lua   | 22 ++++++++++++++--------
2 files changed, 24 insertions(+), 8 deletions(-)
diff --git a/nerv/layer/graph.lua b/nerv/layer/graph.lua
index 68d5f51..ddbc85e 100644
--- a/nerv/layer/graph.lua
+++ b/nerv/layer/graph.lua
@@ -65,7 +65,17 @@ function GraphLayer:discover(id, layer_repo)
     return ref
 end
 
+local function reverse(connections)
+    for i = 1, #connections do
+        connections[i][3] = connections[i][3] * -1
+    end
+end
+
 function GraphLayer:graph_init(layer_repo, connections)
+    if self.lconf.reversed then
+        reverse(connections)
+    end
+
     local layers = {}
     layers['<input>'] = {
         inputs = {},
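
The new reverse() helper above negates the time-delay field (the third element) of every connection triple, so that when layer_conf.reversed is set (self.lconf holds the layer_conf passed to the constructor) a graph built for forward unrolling is rewired to cross time steps in the opposite direction. A minimal, self-contained sketch of the effect, with port names borrowed from the rnn.lua diff below:

local connections = {
    {'<input>[1]', 'main[1]', 0},   -- same-step edge: 0 * -1 is still 0
    {'duplicate[1]', 'main[1]', 1}, -- recurrent edge crossing one time step
}

local function reverse(connections)
    for i = 1, #connections do
        connections[i][3] = connections[i][3] * -1
    end
end

reverse(connections)
print(connections[2][3]) -- -1: the recurrent edge now runs backwards in time

Note that reverse() mutates the connection table in place, so a caller sharing one connections table between a forward and a reversed layer would see both flipped.
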
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index 333be9e..aad2b94 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -2,13 +2,17 @@ local RNNLayer = nerv.class('nerv.RNNLayer', 'nerv.GraphLayer')
 
 function RNNLayer:__init(id, global_conf, layer_conf)
     nerv.Layer.__init(self, id, global_conf, layer_conf)
-    self:check_dim_len(1, 1)
+    self:check_dim_len(-1, 1)
+    if #self.dim_in == 0 then
+        nerv.error('RNN Layer %s has no input', self.id)
+    end
 
-    if layer_conf.activation == nil then
-        layer_conf.activation = 'nerv.SigmoidLayer'
+    self.activation = layer_conf.activation
+    if self.activation == nil then
+        self.activation = 'nerv.SigmoidLayer'
     end
 
-    local din = layer_conf.dim_in[1]
+    local din = layer_conf.dim_in
     local dout = layer_conf.dim_out[1]
 
     local pr = layer_conf.pr
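
Replacing check_dim_len(1, 1) with check_dim_len(-1, 1) lifts the single-input restriction: a negative expected count is NERV's convention for "any number of ports", which is why the explicit #self.dim_in == 0 guard is now needed to reject an RNN with no input at all. din accordingly becomes the whole dim_in array rather than its first element. A hypothetical configuration the widened interface would accept (names and widths are illustrative only):

local layer_conf = {
    dim_in = {429, 100},            -- any number of input ports is now accepted
    dim_out = {300},
    activation = 'nerv.TanhLayer',  -- optional; defaults to 'nerv.SigmoidLayer'
    -- pr = ...,                    -- optional pre-trained parameter repo
}
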
@@ -18,9 +22,9 @@ function RNNLayer:__init(id, global_conf, layer_conf)
     local layers = {
         ['nerv.AffineLayer'] = {
-            main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
+            main = {dim_in = table.connect({dout}, din), dim_out = {dout}, pr = pr},
         },
-        [layer_conf.activation] = {
+        [self.activation] = {
             activation = {dim_in = {dout}, dim_out = {dout}},
         },
         ['nerv.DuplicateLayer'] = {
             duplicate = {dim_in = {dout}, dim_out = {dout, dout}},
@@ -29,12 +33,14 @@ function RNNLayer:__init(id, global_conf, layer_conf)
     }
 
     local connections = {
-        {'<input>[1]', 'main[1]', 0},
         {'main[1]', 'activation[1]', 0},
         {'activation[1]', 'duplicate[1]', 0},
-        {'duplicate[1]', 'main[2]', 1},
+        {'duplicate[1]', 'main[1]', 1},
         {'duplicate[2]', '<output>[1]', 0},
     }
+    for i = 1, #din do
+        table.insert(connections, {'<input>[' .. i .. ']', 'main[' .. (i + 1) .. ']', 0})
+    end
 
     self:add_prefix(layers, connections)
     local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
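
With the recurrent edge now fixed at main[1] (delay 1 from duplicate[1]), the loop appends one delay-0 connection per external input, wiring <input>[i] into main[i + 1]; table.connect, presumably an array-concatenation helper from NERV's utility code, builds the matching affine input dimensions {dout, din[1], ..., din[n]}. A minimal sketch of that layout under those assumptions:

-- connect() stands in for the assumed table.connect semantics.
local function connect(a, b)
    local r = {}
    for i = 1, #a do r[#r + 1] = a[i] end
    for i = 1, #b do r[#r + 1] = b[i] end
    return r
end

local din, dout = {429, 100}, 300       -- illustrative widths
local dims = connect({dout}, din)
print(table.concat(dims, ', '))         -- 300, 429, 100

Keeping the recurrent state on port 1 means adding or removing input streams never renumbers the delayed edge, which is what lets the fixed connection list stay unchanged while the loop handles the variable part.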