From e934b616496940bfe0924ca1992035d2346baa62 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Thu, 28 May 2015 17:01:10 +0800
Subject: add softmax + ce layer; test_dnn_layers produces the same result as TNet

---
 layer/sigmoid.lua    |  3 +++
 layer/softmax_ce.lua | 32 ++++++++++++++++++++++++++++++++
 2 files changed, 35 insertions(+)
 create mode 100644 layer/softmax_ce.lua

(limited to 'layer')

diff --git a/layer/sigmoid.lua b/layer/sigmoid.lua
index 41a6ef7..ca34419 100644
--- a/layer/sigmoid.lua
+++ b/layer/sigmoid.lua
@@ -5,6 +5,9 @@ function SigmoidLayer:__init(id, global_conf)
     self.gconf = global_conf
 end
 
+function SigmoidLayer:init()
+end
+
 function SigmoidLayer:update(bp_err, input, output)
     -- no params, therefore do nothing
 end
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
new file mode 100644
index 0000000..37d2864
--- /dev/null
+++ b/layer/softmax_ce.lua
@@ -0,0 +1,32 @@
+local SoftmaxCELayer = nerv.class("nerv.SoftmaxCELayer", "nerv.Layer")
+
+function SoftmaxCELayer:__init(id, global_conf)
+    self.id = id
+    self.gconf = global_conf
+end
+
+function SoftmaxCELayer:init()
+    self.total_ce = 0.0
+    self.total_frames = 0
+end
+
+function SoftmaxCELayer:update(bp_err, input, output)
+    -- no params, therefore do nothing
+end
+
+function SoftmaxCELayer:propagate(input, output)
+    local soutput = input[0]:create() -- temporary value for calc softmax
+    self.soutput = soutput
+    soutput:softmax(input[0])
+    local ce = soutput:create()
+    ce:log_elem(soutput)
+    ce:mul_elem(ce, input[1])
+    -- add total ce
+    self.total_ce = self.total_ce - ce:rowsum():colsum()[0]
+    self.total_frames = self.total_frames + soutput:nrow()
+end
+
+function SoftmaxCELayer:back_propagate(next_bp_err, bp_err, input, output)
+    -- softmax output - label
+    next_bp_err[0]:add(self.soutput, input[1], 1.0, -1.0)
+end
-- 
cgit v1.2.3-70-g09d2