author    | txh18 <cloudygooseg@gmail.com> | 2015-11-25 23:42:37 +0800
committer | txh18 <cloudygooseg@gmail.com> | 2015-11-25 23:42:37 +0800
commit    | 75a2d6a2a08caf987017f5a9043ac93afcd70980 (patch)
tree      | d3b08fa846de56f3cd8a3f9a7bd3add200840ea8 /nerv/layer/tanh.lua
parent    | ca3500f01ea7ce695a4dbf70d2be8244827097c9 (diff)
changed auto-generating params, will not save in global_conf.param
Diffstat (limited to 'nerv/layer/tanh.lua')
-rw-r--r-- | nerv/layer/tanh.lua | 35
1 file changed, 35 insertions, 0 deletions
diff --git a/nerv/layer/tanh.lua b/nerv/layer/tanh.lua
new file mode 100644
index 0000000..e1c32f2
--- /dev/null
+++ b/nerv/layer/tanh.lua
@@ -0,0 +1,35 @@
+local TanhLayer = nerv.class("nerv.TanhLayer", "nerv.Layer")
+
+function TanhLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.gconf = global_conf
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self:check_dim_len(1, 1)
+end
+
+function TanhLayer:init()
+    if self.dim_in[1] ~= self.dim_out[1] then
+        nerv.error("mismatching dimensions of input and output")
+    end
+end
+
+function TanhLayer:batch_resize(batch_size)
+    -- do nothing
+end
+
+function TanhLayer:update(bp_err, input, output)
+    -- no params, therefore do nothing
+end
+
+function TanhLayer:propagate(input, output)
+    output[1]:tanh(input[1])
+end
+
+function TanhLayer:back_propagate(bp_err, next_bp_err, input, output)
+    next_bp_err[1]:tanh_grad(bp_err[1], output[1])
+end
+
+function TanhLayer:get_params()
+    return nerv.ParamRepo({})
+end
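
For context, a minimal usage sketch of the layer added by this commit. The constructor signature and the table-valued dim_in/dim_out fields follow the diff above; the gconf table and the dimension value 429 are hypothetical placeholders, not taken from this commit.

-- Hypothetical usage sketch: gconf and the dimension 429 are placeholders.
local gconf = {}  -- global configuration table (assumed shape)
local layer = nerv.TanhLayer("tanh1", gconf,
                             {dim_in = {429}, dim_out = {429}})
layer:init()  -- checks that dim_in[1] == dim_out[1], per init() above
-- layer:propagate(input, output) then computes output[1] = tanh(input[1])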
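
A note on the backward pass: since d/dx tanh(x) = 1 - tanh(x)^2, back_propagate can work from the already-computed forward output instead of the input, and the tanh_grad kernel (not part of this diff) presumably computes next_bp_err = bp_err * (1 - output^2) elementwise. A plain-Lua reference sketch of that assumed semantics, over tables of tables:

-- Reference sketch (assumption about tanh_grad's semantics, not NERV API):
-- err_out[i][j] = err_in[i][j] * (1 - out[i][j]^2), where out holds the
-- forward tanh output, so no re-evaluation of tanh is needed.
local function tanh_grad_reference(err_in, out)
    local err_out = {}
    for i, row in ipairs(out) do
        err_out[i] = {}
        for j, y in ipairs(row) do
            err_out[i][j] = err_in[i][j] * (1 - y * y)
        end
    end
    return err_out
end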