diff options
Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/duplicate.lua | 12 ++++++------
-rw-r--r--  nerv/layer/identity.lua  | 33 +++++++++++++++++++++++++++++++++
2 files changed, 39 insertions, 6 deletions
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua index 58758e8..fbd4a9e 100644 --- a/nerv/layer/duplicate.lua +++ b/nerv/layer/duplicate.lua @@ -16,13 +16,10 @@ function DuplicateLayer:__init(id, global_conf, layer_conf) end end -function DuplicateLayer:init(batch_size) +function DuplicateLayer:init() end -function DuplicateLayer:batch_resize(batch_size) -end - -function DuplicateLayer:update(bp_err, input, output) +function DuplicateLayer:batch_resize() end function DuplicateLayer:propagate(input, output) @@ -32,9 +29,12 @@ function DuplicateLayer:propagate(input, output) end end -function DuplicateLayer:back_propagate(bp_err, next_bp_err, input, output) +function DuplicateLayer:back_propagate(bp_err, next_bp_err) next_bp_err:copy_from(bp_err[1]) for i = 2, #self.dim_out do next_bp_err:add(next_bp_err, bp_err[i], 1.0, 1.0) end end + +function DuplicateLayer:update() +end diff --git a/nerv/layer/identity.lua b/nerv/layer/identity.lua new file mode 100644 index 0000000..dc796fb --- /dev/null +++ b/nerv/layer/identity.lua @@ -0,0 +1,33 @@ +local IdentityLayer = nerv.class('nerv.IdentityLayer', 'nerv.Layer') + +function IdentityLayer:__init(id, global_conf, layer_conf) + self.id = id + self.dim_in = layer_conf.dim_in + self.dim_out = layer_conf.dim_out + self.gconf = global_conf + self:check_dim_len(1, 1) + if self.dim_in[1] ~= self.dim_out[1] then + nerv.error('mismatching dimensions of input and output') + end +end + +function IdentityLayer:init() +end + +function IdentityLayer:batch_resize() +end + +function IdentityLayer:propagate(input, output) + output[1]:copy_from(input[1]) +end + +function IdentityLayer:back_propagate(bp_err, next_bp_err) + next_bp_err[1]:copy_from(bp_err) +end + +function IdentityLayer:update() +end + +function IdentityLayer:get_params() + return nerv.ParamRepo({}) +end |