author    Determinant <ted.sybil@gmail.com>    2016-03-10 13:40:11 +0800
committer Determinant <ted.sybil@gmail.com>    2016-03-10 13:40:11 +0800
commit    a32195e3e2ae9ca0f0c7a82e73e6bddb64568c05 (patch)
tree      a19f21f8cbadecff7357f9a102f160f5fe699b65 /nerv/layer/elem_mul.lua
parent    4a6872601f05e9ecc059f83fb64a0a4887992b99 (diff)
major change: clearer param binding semantics; permit rebinding; enable
resuming from previous training
Diffstat (limited to 'nerv/layer/elem_mul.lua')
-rw-r--r--    nerv/layer/elem_mul.lua    11
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/nerv/layer/elem_mul.lua b/nerv/layer/elem_mul.lua
index fe80a3f..f03649b 100644
--- a/nerv/layer/elem_mul.lua
+++ b/nerv/layer/elem_mul.lua
@@ -1,14 +1,15 @@
 local ElemMulLayer = nerv.class('nerv.ElemMulLayer', 'nerv.Layer')
 
 function ElemMulLayer:__init(id, global_conf, layer_conf)
-    self.id = id
-    self.dim_in = layer_conf.dim_in
-    self.dim_out = layer_conf.dim_out
-    self.gconf = global_conf
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
     -- element-wise multiplication of input[1] and input[2]
     self:check_dim_len(2, 1)
 end
 
+function ElemMulLayer:bind_params()
+    -- do nothing
+end
+
 function ElemMulLayer:init(batch_size)
     if self.dim_in[1] ~= self.dim_in[2] or
         self.dim_in[1] ~= self.dim_out[1] then
@@ -34,5 +35,5 @@ function ElemMulLayer:update(bp_err, input, output)
 end
 
 function ElemMulLayer:get_params()
-    return nerv.ParamRepo({})
+    return nerv.ParamRepo({}, self.loc_type)
 end
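
For context: the constructor now delegates member setup to the nerv.Layer base __init (which is presumably where self.loc_type comes from), and parameter lookup moves into a separate bind_params() hook that can be called again to re-bind parameters against a new repository, e.g. when resuming from a previous training run. ElemMulLayer owns no parameters, so its hook is a no-op and get_params() returns an empty repo, now tagged with self.loc_type. The sketch below shows how a hypothetical parameterised layer might follow the same convention; the find_param helper, its argument order, the self.lconf field, and the DemoBiasLayer class are illustrative assumptions, not taken from this diff.

-- Illustrative only: a hypothetical nerv.DemoBiasLayer showing where a
-- parameterised layer would bind its parameters under the new scheme.
-- find_param and self.lconf are assumed helpers/fields, not shown in this diff.
local DemoBiasLayer = nerv.class('nerv.DemoBiasLayer', 'nerv.Layer')

function DemoBiasLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1)
    self:bind_params()
end

function DemoBiasLayer:bind_params()
    -- look the parameter up (or create it); calling bind_params again
    -- with a different parameter repository re-binds it, which is what
    -- makes resuming from a previous training run possible
    self.bp = self:find_param("bp", self.lconf, self.gconf,
                              nerv.BiasParam, {1, self.dim_out[1]})
end

function DemoBiasLayer:get_params()
    -- report owned parameters, tagged with the layer's location type
    return nerv.ParamRepo({self.bp}, self.loc_type)
end

The point of the split is that a driver can later call bind_params() again, after swapping the parameter repository, to pick up previously saved parameters without rebuilding the layer graph.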