path: root/nerv/layer
author    Qi Liu <liuq901@163.com>  2016-03-01 20:00:53 +0800
committer Qi Liu <liuq901@163.com>  2016-03-01 20:00:53 +0800
commit    2ea3e139af91eb894d904d7a956e28619b1a70f6 (patch)
tree      249bb3ace8004f8d79d0e41ee97587fcc015d3de /nerv/layer
parent    1a424bf9233f9b1c67ef135f1a3892b7986c5564 (diff)
network init complete
Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/duplicate.lua  40
-rw-r--r--  nerv/layer/graph.lua       3
-rw-r--r--  nerv/layer/init.lua        1
-rw-r--r--  nerv/layer/rnn.lua         8
4 files changed, 50 insertions(+), 2 deletions(-)
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua
new file mode 100644
index 0000000..58758e8
--- /dev/null
+++ b/nerv/layer/duplicate.lua
@@ -0,0 +1,40 @@
+local DuplicateLayer = nerv.class('nerv.DuplicateLayer', 'nerv.Layer')
+
+function DuplicateLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(1, -1)
+    if #self.dim_out < 1 then
+        nerv.error('no output specified')
+    end
+    for i = 1, #self.dim_out do
+        if self.dim_out[i] ~= self.dim_in[1] then
+            nerv.error('mismatching dimensions of outputs')
+        end
+    end
+end
+
+function DuplicateLayer:init(batch_size)
+end
+
+function DuplicateLayer:batch_resize(batch_size)
+end
+
+function DuplicateLayer:update(bp_err, input, output)
+end
+
+function DuplicateLayer:propagate(input, output)
+    for i = 1, #self.dim_out do
+        output[i]:copy_from(input[1])
+        -- FIXME: use reference copy to speed up
+    end
+end
+
+function DuplicateLayer:back_propagate(bp_err, next_bp_err, input, output)
+    next_bp_err[1]:copy_from(bp_err[1])
+    for i = 2, #self.dim_out do
+        next_bp_err[1]:add(next_bp_err[1], bp_err[i], 1.0, 1.0)
+    end
+end
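
For orientation, here is a minimal usage sketch of the new layer, following the nerv.LayerRepo and connection conventions visible in rnn.lua below. It is not part of this commit, and the surrounding names (param_repo, gconf, sink_a, sink_b) are illustrative assumptions:

-- Hypothetical sketch: fan a 256-dim signal out to two consumers
-- through one DuplicateLayer with two identically sized output ports.
local layers = {
    ['nerv.DuplicateLayer'] = {
        dup = {dim_in = {256}, dim_out = {256, 256}},
    },
}
local repo = nerv.LayerRepo(layers, param_repo, gconf)
-- Each output port then drives exactly one connection, e.g.
--   {'dup[1]', 'sink_a[1]', 0},
--   {'dup[2]', 'sink_b[1]', 0},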
diff --git a/nerv/layer/graph.lua b/nerv/layer/graph.lua
index 36a9672..d72d849 100644
--- a/nerv/layer/graph.lua
+++ b/nerv/layer/graph.lua
@@ -64,6 +64,9 @@ function GraphLayer:graph_init(layer_repo, connections)
         local id_to, port_to = parse_id(to)
         local ref_from = self:discover(id_from, layer_repo)
         local ref_to = self:discover(id_to, layer_repo)
+        if ref_from.outputs[port_from] ~= nil then
+            nerv.error('%s has already been attached', from)
+        end
         if ref_to.inputs[port_to] ~= nil then
             nerv.error('%s has already been attached', to)
         end
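
This guard is the output-side twin of the existing input check: each output port may be attached at most once, so fan-out must now go through the new DuplicateLayer. For example, the pre-commit rnn.lua wiring shown further below would abort under the new check:

-- Reusing sigmoid[1] twice now raises
-- "sigmoid[1] has already been attached" instead of silently
-- attaching both edges; the rnn.lua hunk below routes this
-- fan-out through a DuplicateLayer instead.
local connections = {
    {'sigmoid[1]', 'main[2]', 1},     -- first attachment: accepted
    {'sigmoid[1]', '<output>[1]', 0}, -- second attachment: error
}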
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 5e3395c..6f26d4d 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -125,6 +125,7 @@ nerv.include('lstm_gate.lua')
 nerv.include('dropout.lua')
 nerv.include('gru.lua')
 nerv.include('rnn.lua')
+nerv.include('duplicate.lua')
 
 -- The following lines are for backward compatibility, and will be removed in
 -- the future. The use of these names is deprecated.
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index 8816891..806ac58 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -22,6 +22,9 @@ function RNNLayer:__init(id, global_conf, layer_conf)
         ['nerv.SigmoidLayer'] = {
             sigmoid = {dim_in = {dout}, dim_out = {dout}},
         },
+        ['nerv.DuplicateLayer'] = {
+            dup = {dim_in = {dout}, dim_out = {dout, dout}},
+        }
     }
 
     local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
@@ -29,8 +32,9 @@ function RNNLayer:__init(id, global_conf, layer_conf)
     local connections = {
         {'<input>[1]', 'main[1]', 0},
         {'main[1]', 'sigmoid[1]', 0},
-        {'sigmoid[1]', 'main[2]', 1},
-        {'sigmoid[1]', '<output>[1]', 0},
+        {'sigmoid[1]', 'dup[1]', 0},
+        {'dup[1]', 'main[2]', 1},
+        {'dup[2]', '<output>[1]', 0},
     }
 
     self:graph_init(layer_repo, connections)
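
Read together, the new recurrent wiring is: <input> -> main -> sigmoid -> dup, where dup[1] feeds back into main[2] at a delay of one time step (the trailing 1 in the connection triple) and dup[2] exposes the hidden state as the layer's output. A hedged instantiation sketch, using the (id, global_conf, layer_conf) signature shown in the hunk headers; the gconf contents and the pr field are assumptions, not confirmed by this commit:

-- Illustrative only: construct the updated RNNLayer. The layer_conf
-- fields mirror what __init appears to read (dim_in, dim_out, and a
-- parameter repo passed on to nerv.LayerRepo as pr).
local rnn = nerv.RNNLayer('rnn1', gconf,
                          {dim_in = {100}, dim_out = {200}, pr = param_repo})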