Diffstat (limited to 'nerv/layer/relu.lua')
-rw-r--r--    nerv/layer/relu.lua    33
1 file changed, 33 insertions, 0 deletions
diff --git a/nerv/layer/relu.lua b/nerv/layer/relu.lua
new file mode 100644
index 0000000..b7951e7
--- /dev/null
+++ b/nerv/layer/relu.lua
@@ -0,0 +1,33 @@
+local ReluLayer = nerv.class('nerv.ReluLayer', 'nerv.Layer')
+
+function ReluLayer:__init(id, global_conf, layer_conf)
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
+ self:check_dim_len(1, 1)
+end
+
+function ReluLayer:bind_params()
+end
+
+function ReluLayer:init()
+ if self.dim_in[1] ~= self.dim_out[1] then
+ nerv.error('mismatching dimensions of input and output')
+ end
+end
+
+function ReluLayer:batch_resize(batch_size)
+end
+
+function ReluLayer:update()
+end
+
+function ReluLayer:propagate(input, output)
+ output[1]:relu(input[1])
+end
+
+function ReluLayer:back_propagate(bp_err, next_bp_err, input, output)
+ next_bp_err[1]:relu_grad(bp_err[1], output[1])
+end
+
+function ReluLayer:get_params()
+    return nerv.ParamRepo({}, self.loc_type)
+end
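For reference, relu and relu_grad used in propagate and back_propagate are element-wise kernels supplied by NERV's matrix backend. The sketch below is a plain-Lua illustration of the semantics this layer relies on; the helper names forward_relu/backward_relu and the table-of-numbers representation are illustrative only, not part of the NERV API. Note that the backward pass derives its mask from the forward output rather than the input, which is why back_propagate hands output[1] (not input[1]) to relu_grad.

-- Plain-Lua sketch (hypothetical helpers, not the NERV API) of what the
-- relu / relu_grad kernels are assumed to compute element-wise.

-- forward: y[i] = max(0, x[i])
local function forward_relu(x)
    local y = {}
    for i = 1, #x do
        y[i] = math.max(0, x[i])
    end
    return y
end

-- backward: dx[i] = dy[i] if y[i] > 0, else 0
-- (mask taken from the forward output y, matching
--  next_bp_err[1]:relu_grad(bp_err[1], output[1]) above)
local function backward_relu(dy, y)
    local dx = {}
    for i = 1, #dy do
        dx[i] = (y[i] > 0) and dy[i] or 0
    end
    return dx
end

-- tiny check: forward_relu({-1.5, 0, 2})            --> {0, 0, 2}
--             backward_relu({1, 1, 1}, {0, 0, 2})   --> {0, 0, 1}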
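A minimal construction sketch, assuming the usual NERV convention that classes created with nerv.class are instantiated by calling them with (id, global_conf, layer_conf) and that layer_conf carries matching dim_in/dim_out lists; the id 'relu1', the 429-wide dimension, and the surrounding variables are hypothetical.

-- Hypothetical usage; gconf, input, output, bp_err and next_bp_err are
-- assumed to be set up elsewhere. ReLU is shape-preserving, so dim_in[1]
-- must equal dim_out[1] (enforced by ReluLayer:init()).
local layer = nerv.ReluLayer('relu1', gconf, {dim_in = {429}, dim_out = {429}})
layer:init()                        -- raises nerv.error on a dimension mismatch
layer:propagate(input, output)      -- output[1]:relu(input[1])
layer:back_propagate(bp_err, next_bp_err, input, output)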