aboutsummaryrefslogtreecommitdiff
path: root/nerv/layer/duplicate.lua
diff options
context:
space:
mode:
authorQi Liu <liuq901@163.com>2016-03-01 20:00:53 +0800
committerQi Liu <liuq901@163.com>2016-03-01 20:00:53 +0800
commit2ea3e139af91eb894d904d7a956e28619b1a70f6 (patch)
tree249bb3ace8004f8d79d0e41ee97587fcc015d3de /nerv/layer/duplicate.lua
parent1a424bf9233f9b1c67ef135f1a3892b7986c5564 (diff)
network init complete
Diffstat (limited to 'nerv/layer/duplicate.lua')
-rw-r--r--nerv/layer/duplicate.lua40
1 files changed, 40 insertions, 0 deletions
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua
new file mode 100644
index 0000000..58758e8
--- /dev/null
+++ b/nerv/layer/duplicate.lua
@@ -0,0 +1,40 @@
+local DuplicateLayer = nerv.class('nerv.DuplicateLayer', 'nerv.Layer')
+
+-- Layer that copies its single input to every one of its output ports.
+-- Validates that each declared output dimension matches the input dimension.
+function DuplicateLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(1, -1) -- exactly one input, any number of outputs
+    local n_out = #self.dim_out
+    if n_out < 1 then
+        nerv.error('no output specified')
+    end
+    for out_idx = 1, n_out do
+        if self.dim_out[out_idx] ~= self.dim_in[1] then
+            nerv.error('mismatching dimensions of outputs')
+        end
+    end
+end
+
+-- Initialization hook. DuplicateLayer keeps no parameters or internal
+-- buffers, so there is nothing to set up; intentionally a no-op.
+function DuplicateLayer:init(batch_size)
+end
+
+-- Batch-size change hook. No internal buffers depend on the batch size,
+-- so resizing requires no work; intentionally a no-op.
+function DuplicateLayer:batch_resize(batch_size)
+end
+
+-- Parameter-update hook. DuplicateLayer has no trainable parameters,
+-- so there is nothing to update; intentionally a no-op.
+function DuplicateLayer:update(bp_err, input, output)
+end
+
+-- Forward pass: replicate the single input matrix into every output port.
+function DuplicateLayer:propagate(input, output)
+    local src = input[1]
+    for out_idx = 1, #self.dim_out do
+        -- FIXME: use reference copy to speed up
+        output[out_idx]:copy_from(src)
+    end
+end
+
+-- Backward pass: the gradient w.r.t. the single input is the elementwise
+-- sum of the error matrices flowing back from all output ports.
+function DuplicateLayer:back_propagate(bp_err, next_bp_err, input, output)
+    -- next_bp_err follows the same per-port table convention as input/output
+    -- in propagate() (which indexes input[1]); the original code called
+    -- :copy_from/:add on the table itself, which would fail at runtime.
+    next_bp_err[1]:copy_from(bp_err[1])
+    for i = 2, #self.dim_out do
+        next_bp_err[1]:add(next_bp_err[1], bp_err[i], 1.0, 1.0)
+    end
+end