Diffstat (limited to 'nerv/examples/lmptb/tnn/layers/elem_mul.lua')
 nerv/examples/lmptb/tnn/layers/elem_mul.lua | 38 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+), 0 deletions(-)
diff --git a/nerv/examples/lmptb/tnn/layers/elem_mul.lua b/nerv/examples/lmptb/tnn/layers/elem_mul.lua
new file mode 100644
index 0000000..c809d3e
--- /dev/null
+++ b/nerv/examples/lmptb/tnn/layers/elem_mul.lua
@@ -0,0 +1,38 @@
+local ElemMulLayer = nerv.class('nerv.ElemMulLayer', 'nerv.Layer')
+
+function ElemMulLayer:__init(id, global_conf, layer_conf)
+ self.id = id
+ self.dim_in = layer_conf.dim_in
+ self.dim_out = layer_conf.dim_out
+ self.gconf = global_conf
+
+ self:check_dim_len(2, 1) -- element-wise product of input[1] and input[2]
+end
+
+function ElemMulLayer:init(batch_size)
+ if self.dim_in[1] ~= self.dim_in[2] or
+ self.dim_in[1] ~= self.dim_out[1] then
+ nerv.error("dim_in and dim_out mismatch for ElemMulLayer")
+ end
+end
+
+function ElemMulLayer:batch_resize(batch_size)
+ --do nothing
+end
+
+function ElemMulLayer:propagate(input, output)
+ output[1]:mul_elem(input[1], input[2])
+end
+
+function ElemMulLayer:back_propagate(bp_err, next_bp_err, input, output)
+ next_bp_err[1]:mul_elem(bp_err[1], input[2])
+ next_bp_err[2]:mul_elem(bp_err[1], input[1])
+end
+
+function ElemMulLayer:update(bp_err, input, output)
+ --do nothing
+end
+
+function ElemMulLayer:get_params()
+ return nerv.ParamRepo({})
+end
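
For reference, back_propagate implements the product rule for an element-wise product z = a .* b: dL/da = dL/dz .* b and dL/db = dL/dz .* a, which is exactly the pair of mul_elem calls above. Below is a minimal usage sketch, not part of this commit; it assumes the NERV runtime is loaded and that nerv.CuMatrixFloat(nrow, ncol) constructs a matrix supporting the same mul_elem method the layer uses. All other identifiers (gconf, a, b, z, err, da, db) are illustrative.

-- Minimal sketch, assuming NERV is loaded and nerv.CuMatrixFloat
-- provides the mul_elem method used by this layer.
local gconf = {} -- hypothetical global_conf; the layer only stores it
local layer = nerv.ElemMulLayer("elem_mul_example", gconf,
                                {dim_in = {3, 3}, dim_out = {3}})
layer:init(1) -- dim check passes: dim_in[1] == dim_in[2] == dim_out[1]

local a = nerv.CuMatrixFloat(1, 3) -- input[1]
local b = nerv.CuMatrixFloat(1, 3) -- input[2]
local z = nerv.CuMatrixFloat(1, 3) -- output[1]
-- ... fill a and b with data here ...
layer:propagate({a, b}, {z}) -- z = a .* b, element-wise

-- Backward: with err = dL/dz, the product rule gives
-- dL/da = err .* b and dL/db = err .* a.
local err = nerv.CuMatrixFloat(1, 3)
local da = nerv.CuMatrixFloat(1, 3)
local db = nerv.CuMatrixFloat(1, 3)
layer:back_propagate({err}, {da, db}, {a, b}, {z})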
+end