author    Qi Liu <liuq901@163.com>  2016-04-29 16:00:20 +0800
committer Qi Liu <liuq901@163.com>  2016-04-29 16:00:20 +0800
commit    1f5568a9e1457dcf5aadd08749aef6194370b43f (patch)
tree      d705bbec5b701774a144810c5e60a73e346f9c39  /nerv/layer/relu.lua
parent    6051c37a2c55365a5834d5b4c11d973e2e4e1ad4 (diff)
add relu layer
Diffstat (limited to 'nerv/layer/relu.lua')
-rw-r--r--  nerv/layer/relu.lua  33
1 file changed, 33 insertions, 0 deletions
diff --git a/nerv/layer/relu.lua b/nerv/layer/relu.lua
new file mode 100644
index 0000000..b7951e7
--- /dev/null
+++ b/nerv/layer/relu.lua
@@ -0,0 +1,33 @@
+local ReluLayer = nerv.class('nerv.ReluLayer', 'nerv.Layer')
+
+function ReluLayer:__init(id, global_conf, layer_conf)
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
+ self:check_dim_len(1, 1)
+end
+
+function ReluLayer:bind_params()
+end
+
+function ReluLayer:init()
+ if self.dim_in[1] ~= self.dim_out[1] then
+ nerv.error('mismatching dimensions of input and output')
+ end
+end
+
+function ReluLayer:batch_resize(batch_size)
+end
+
+function ReluLayer:update()
+end
+
+function ReluLayer:propagate(input, output)
+ output[1]:relu(input[1])
+end
+
+function ReluLayer:back_propagate(bp_err, next_bp_err, input, output)
+ next_bp_err[1]:relu_grad(bp_err[1], output[1])
+end
+
+function ReluLayer:get_params()
+ return nerv.ParamRepo({}, self.loc_type)
+end
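
Note on the two kernels the layer relies on: output[1]:relu(input[1]) and
next_bp_err[1]:relu_grad(bp_err[1], output[1]) are element-wise operations on nerv
matrix objects. The sketch below is a plain-Lua illustration of the semantics they
are assumed to have (forward: max(0, x); backward: pass the error through only where
the unit was active); the free functions and table-based signatures here are
hypothetical and are not the nerv matrix API.

-- Illustrative stand-ins for the assumed kernel semantics (not the nerv API).

-- forward: out[i] = max(0, in[i])
local function relu(input)
    local output = {}
    for i, v in ipairs(input) do
        output[i] = v > 0 and v or 0
    end
    return output
end

-- backward: keep the incoming error only where the forward output was positive
local function relu_grad(bp_err, output)
    local next_bp_err = {}
    for i, e in ipairs(bp_err) do
        next_bp_err[i] = output[i] > 0 and e or 0
    end
    return next_bp_err
end

-- tiny check
local x  = {-1.5, 0.0, 2.0}
local y  = relu(x)            -- {0, 0, 2.0}
local dy = {0.1, 0.2, 0.3}
local dx = relu_grad(dy, y)   -- {0, 0, 0.3}

Because the rectifier's derivative depends only on the sign of the activation, and the
output is positive exactly where the input is, back_propagate can mask with output[1]
instead of keeping input[1] around.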