Diffstat (limited to 'nerv/layer/duplicate.lua')
-rw-r--r--  nerv/layer/duplicate.lua  12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua
index 58758e8..fbd4a9e 100644
--- a/nerv/layer/duplicate.lua
+++ b/nerv/layer/duplicate.lua
@@ -16,13 +16,10 @@ function DuplicateLayer:__init(id, global_conf, layer_conf)
     end
 end
 
-function DuplicateLayer:init(batch_size)
+function DuplicateLayer:init()
 end
 
-function DuplicateLayer:batch_resize(batch_size)
-end
-
-function DuplicateLayer:update(bp_err, input, output)
+function DuplicateLayer:batch_resize()
 end
 
 function DuplicateLayer:propagate(input, output)
@@ -32,9 +29,12 @@ function DuplicateLayer:propagate(input, output)
     end
 end
 
-function DuplicateLayer:back_propagate(bp_err, next_bp_err, input, output)
+function DuplicateLayer:back_propagate(bp_err, next_bp_err)
     next_bp_err:copy_from(bp_err[1])
     for i = 2, #self.dim_out do
         next_bp_err:add(next_bp_err, bp_err[i], 1.0, 1.0)
     end
 end
+
+function DuplicateLayer:update()
+end
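
For reference, the reworked back_propagate performs a plain error accumulation: the gradient sent back through the layer's single input is the element-wise sum of the gradients arriving at all duplicated outputs. The sketch below illustrates that summation in standalone Lua, with ordinary tables standing in for nerv matrices and a hypothetical helper name (accumulate_bp_err); it is not nerv code, only a picture of the arithmetic.

-- Sketch of the accumulation done by DuplicateLayer:back_propagate.
-- Plain Lua tables stand in for nerv matrices (assumption);
-- accumulate_bp_err is a hypothetical helper, not part of nerv.
local function accumulate_bp_err(bp_err)
    -- start from the first output's error
    -- (mirrors next_bp_err:copy_from(bp_err[1]))
    local next_bp_err = {}
    for j, v in ipairs(bp_err[1]) do
        next_bp_err[j] = v
    end
    -- add the remaining outputs' errors element-wise
    -- (mirrors next_bp_err:add(next_bp_err, bp_err[i], 1.0, 1.0))
    for i = 2, #bp_err do
        for j, v in ipairs(bp_err[i]) do
            next_bp_err[j] = next_bp_err[j] + v
        end
    end
    return next_bp_err
end

-- usage: errors coming back from two duplicated outputs
print(table.concat(accumulate_bp_err({{1, 2}, {3, 4}}), ", "))  -- prints "4, 6"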