author	Determinant <ted.sybil@gmail.com>	2016-03-11 13:59:46 +0800
committer	Determinant <ted.sybil@gmail.com>	2016-03-11 13:59:46 +0800
commit	e6d28de460dfd06d696d369119247179c7a7525d (patch)
tree	6263fb1555ddcba962edc31ee1312679135c06c4 /nerv/layer/duplicate.lua
parent	a32195e3e2ae9ca0f0c7a82e73e6bddb64568c05 (diff)
parent	f26288ba61d3d16866e1b227a71e7d9c46923436 (diff)
Merge branch 'master' of https://github.com/liuq901/nerv into liuq901-master
Conflicts:
	nerv/layer/init.lua
	nerv/nn/layer_repo.lua
Diffstat (limited to 'nerv/layer/duplicate.lua')
-rw-r--r--	nerv/layer/duplicate.lua	41
1 file changed, 41 insertions, 0 deletions
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua
new file mode 100644
index 0000000..137472b
--- /dev/null
+++ b/nerv/layer/duplicate.lua
@@ -0,0 +1,41 @@
+local DuplicateLayer = nerv.class('nerv.DuplicateLayer', 'nerv.Layer')
+
+function DuplicateLayer:__init(id, global_conf, layer_conf)
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
+ self:check_dim_len(1, -1)
+ if #self.dim_out < 1 then
+ nerv.error('no output specified')
+ end
+ for i = 1, #self.dim_out do
+ if self.dim_out[i] ~= self.dim_in[1] then
+ nerv.error('mismatching dimensions of outputs')
+ end
+ end
+end
+
+function DuplicateLayer:init()
+end
+
+function DuplicateLayer:batch_resize()
+end
+
+function DuplicateLayer:propagate(input, output)
+ for i = 1, #self.dim_out do
+ output[i]:copy_from(input[1])
+ -- FIXME: use reference copy to speed up
+ end
+end
+
+function DuplicateLayer:back_propagate(bp_err, next_bp_err)
+ next_bp_err[1]:copy_from(bp_err[1])
+ for i = 2, #self.dim_out do
+ next_bp_err[1]:add(next_bp_err[1], bp_err[i], 1.0, 1.0)
+ end
+end
+
+function DuplicateLayer:update()
+end
+
+function DuplicateLayer:get_params()
+ return nerv.ParamRepo({}, self.loc_type)
+end
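
For reference, the semantics of the new layer: propagate() copies the single input into every output, and back_propagate() sums the error signals coming back from all of the outputs into the single input error. Below is a minimal sketch in plain Lua (not part of the commit): the table-based "vectors" and the copy_from/add helpers are stand-ins for nerv's matrix API, with add() mirroring the dst = alpha*a + beta*b convention used by the layer code above.

-- stand-in "matrix": a plain Lua table holding an array of numbers
local function vec(t) return {v = t} end

-- element-wise copy, mimicking output[i]:copy_from(input[1])
local function copy_from(dst, src)
    for i = 1, #src.v do dst.v[i] = src.v[i] end
end

-- dst = alpha*a + beta*b, mimicking m:add(a, b, alpha, beta)
local function add(dst, a, b, alpha, beta)
    for i = 1, #a.v do dst.v[i] = alpha * a.v[i] + beta * b.v[i] end
end

-- forward pass: every output becomes a copy of the one input
local input  = { vec({1, 2, 3}) }
local output = { vec({0, 0, 0}), vec({0, 0, 0}) }
for i = 1, #output do copy_from(output[i], input[1]) end

-- backward pass: per-output errors accumulate into one input error
local bp_err      = { vec({0.1, 0.1, 0.1}), vec({0.5, 0.5, 0.5}) }
local next_bp_err = { vec({0, 0, 0}) }
copy_from(next_bp_err[1], bp_err[1])
for i = 2, #bp_err do
    add(next_bp_err[1], next_bp_err[1], bp_err[i], 1.0, 1.0)
end
-- next_bp_err[1].v is now {0.6, 0.6, 0.6}

Constructing the layer follows the __init signature above. Assuming the usual dim_in/dim_out fields in layer_conf (the dimensions and the gconf name here are illustrative), nerv.DuplicateLayer('dup', gconf, {dim_in = {256}, dim_out = {256, 256}}) would fan one 256-dimensional input out to two identical outputs.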