From 44752a8175c54effd0c901333a7eb4cc71dd6810 Mon Sep 17 00:00:00 2001 From: Determinant Date: Wed, 27 May 2015 12:02:44 +0800 Subject: add implementation for sigmoid layer (not tested) --- layer/affine.lua | 1 + layer/sigmoid.lua | 14 ++++++++++++++ 2 files changed, 15 insertions(+) create mode 100644 layer/sigmoid.lua (limited to 'layer') diff --git a/layer/affine.lua b/layer/affine.lua index 221aacd..94e7497 100644 --- a/layer/affine.lua +++ b/layer/affine.lua @@ -11,6 +11,7 @@ function LinearTransParam:write(pfhandle) end function AffineLayer:__init(id, global_conf, ltp, bp) + self.id = id self.ltp = ltp self.bp = bp self.gconf = global_conf diff --git a/layer/sigmoid.lua b/layer/sigmoid.lua new file mode 100644 index 0000000..d69e9e3 --- /dev/null +++ b/layer/sigmoid.lua @@ -0,0 +1,14 @@ +local SigmoidLayer = nerv.class("nerv.SigmoidLayer", "nerv.Layer") + +function SigmoidLayer:__init(id, global_conf) + self.id = id + self.gconf = global_conf +end + +function SigmoidLayer:propagate(input, output) + output:sigmoid(input) +end + +function SigmoidLayer:back_propagate(next_bp_err, bp_err, input, output) + next_bp_err:sigmoid_grad(bp_err, output) +end -- cgit v1.2.3-70-g09d2