Diffstat (limited to 'nerv/layer/tanh.lua')
-rw-r--r--  nerv/layer/tanh.lua | 38
1 file changed, 38 insertions(+), 0 deletions(-)
diff --git a/nerv/layer/tanh.lua b/nerv/layer/tanh.lua
new file mode 100644
index 0000000..e1c32f2
--- /dev/null
+++ b/nerv/layer/tanh.lua
@@ -0,0 +1,38 @@
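+-- TanhLayer: an element-wise tanh activation layer; it holds no trainable parameters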
+local TanhLayer = nerv.class("nerv.TanhLayer", "nerv.Layer")
+
+function TanhLayer:__init(id, global_conf, layer_conf)
+ self.id = id
+ self.gconf = global_conf
+ self.dim_in = layer_conf.dim_in
+ self.dim_out = layer_conf.dim_out
+ self:check_dim_len(1, 1)
+end
+
+function TanhLayer:init()
+ if self.dim_in[1] ~= self.dim_out[1] then
+ nerv.error("mismatching dimensions of input and output")
+ end
+end
+
+function TanhLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
+function TanhLayer:update(bp_err, input, output)
+ -- no params, therefore do nothing
+end
+
+function TanhLayer:propagate(input, output)
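+ -- tanh is applied element-wise across the whole mini-batch matrix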
+ output[1]:tanh(input[1])
+end
+
+function TanhLayer:back_propagate(bp_err, next_bp_err, input, output)
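+ -- tanh'(x) = 1 - tanh(x)^2, so the gradient is computed from the saved output alone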
+ next_bp_err[1]:tanh_grad(bp_err[1], output[1])
+end
+
+function TanhLayer:get_params()
+ return nerv.ParamRepo({})
+end
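
For context beyond the diff itself: a minimal sketch of driving this layer by hand, assuming the usual nerv conventions (a class created with nerv.class is instantiated by calling nerv.TanhLayer directly, and propagate operates on tables of matrices, one per port). The id "tanh1", the empty global_conf, and the 429-unit width are illustrative, not part of the commit.

-- hypothetical driver: the id, dimensions and gconf below are illustrative
local gconf = {}                      -- TanhLayer only stores global_conf
local layer = nerv.TanhLayer("tanh1", gconf,
                             {dim_in = {429}, dim_out = {429}})
layer:init()                          -- verifies dim_in[1] == dim_out[1]
-- input/output are tables holding one matrix per port:
-- layer:propagate(input, output)     -- output[1] = tanh(input[1])
-- layer:back_propagate(bp_err, next_bp_err, input, output)
-- layer:get_params() returns an empty ParamRepo, since tanh has no weights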