Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/init.lua   1
-rw-r--r--  nerv/layer/relu.lua  33
2 files changed, 34 insertions, 0 deletions
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 7521b7a..d175d02 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -280,6 +280,7 @@ nerv.include('duplicate.lua')
nerv.include('identity.lua')
nerv.include('projection.lua')
nerv.include('lstmp.lua')
+nerv.include('relu.lua')
-- The following lines are for backward compatibility, and will be removed in
-- the future. The use of these names are deprecated.
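
Once relu.lua is pulled in by the nerv.include line above, the class defined below is available as nerv.ReluLayer. A minimal instantiation sketch, with a hypothetical id and width (it assumes the base nerv.Layer.__init takes its dimensions from the dim_in/dim_out fields of layer_conf, which is what the dimension check in init() below relies on):

    -- hypothetical usage: the id 'relu1' and the 256-unit widths are examples
    -- only; gconf stands for the global_conf table the surrounding network
    -- already uses (the base class reads its backend settings from it)
    local relu = nerv.ReluLayer('relu1', gconf,
                                {dim_in = {256}, dim_out = {256}})
    relu:init()   -- only checks that the input and output widths agree
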
diff --git a/nerv/layer/relu.lua b/nerv/layer/relu.lua
new file mode 100644
index 0000000..b7951e7
--- /dev/null
+++ b/nerv/layer/relu.lua
@@ -0,0 +1,33 @@
+local ReluLayer = nerv.class('nerv.ReluLayer', 'nerv.Layer')
+
+function ReluLayer:__init(id, global_conf, layer_conf)
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
+ self:check_dim_len(1, 1)
+end
+
+function ReluLayer:bind_params()
+end
+
+function ReluLayer:init()
+ if self.dim_in[1] ~= self.dim_out[1] then
+ nerv.error('mismatching dimensions of input and output')
+ end
+end
+
+function ReluLayer:batch_resize(batch_size)
+end
+
+function ReluLayer:update()
+end
+
+function ReluLayer:propagate(input, output)
+ output[1]:relu(input[1])
+end
+
+function ReluLayer:back_propagate(bp_err, next_bp_err, input, output)
+ next_bp_err[1]:relu_grad(bp_err[1], output[1])
+end
+
+function ReluLayer:get_params()
+    return nerv.ParamRepo({}, self.loc_type)
+end
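
The layer is stateless: update() is a no-op and get_params() returns an empty nerv.ParamRepo, since ReLU has nothing to train. The arithmetic lives in the matrix methods relu and relu_grad called above. A plain-Lua sketch of the semantics those calls are assumed to have, written over ordinary arrays rather than nerv matrices (relu_grad is taken to pass the error through wherever the forward output was positive):

    -- assumed behaviour of output[1]:relu(input[1]): elementwise max(0, x)
    local function relu(x)
        local y = {}
        for i = 1, #x do
            y[i] = math.max(0, x[i])
        end
        return y
    end

    -- assumed behaviour of next_bp_err[1]:relu_grad(bp_err[1], output[1]):
    -- keep the incoming error where the forward output was positive,
    -- zero it elsewhere
    local function relu_grad(err, out)
        local g = {}
        for i = 1, #err do
            g[i] = out[i] > 0 and err[i] or 0
        end
        return g
    end

For example, relu({-2, 0.5, 3}) gives {0, 0.5, 3}, and relu_grad({1, 1, 1}, {0, 0.5, 3}) gives {0, 1, 1}.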