From 37af4bed9c3680fdb9db569605f15013e9b6b64d Mon Sep 17 00:00:00 2001
From: Determinant
Date: Fri, 5 Jun 2015 17:53:05 +0800
Subject: add get_params to all layers

---
 layer/affine.lua     | 10 +++++++---
 layer/bias.lua       |  4 ++++
 layer/init.lua       |  4 ++++
 layer/sigmoid.lua    |  4 ++++
 layer/softmax_ce.lua |  4 ++++
 layer/window.lua     |  4 ++++
 6 files changed, 27 insertions(+), 3 deletions(-)

diff --git a/layer/affine.lua b/layer/affine.lua
index 59a0e91..2cd7acb 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -41,7 +41,7 @@ function AffineLayer:init()
     self.bc:fill(0)
 end
 
-function nerv.AffineLayer:update(bp_err, input, output)
+function AffineLayer:update(bp_err, input, output)
     local ltp = self.ltp.trans
     local bp = self.bp.trans
     local ltc = self.ltc
@@ -60,13 +60,17 @@ function nerv.AffineLayer:update(bp_err, input, output)
     ltp:add(ltp, ltp, 1.0, -gconf.lrate * gconf.wcost)
 end
 
-function nerv.AffineLayer:propagate(input, output)
+function AffineLayer:propagate(input, output)
     -- apply linear transform
     output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
     -- add bias
     output[1]:add_row(self.bp.trans, 1.0)
 end
 
-function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
+function AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
     next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
 end
+
+function AffineLayer:get_params()
+    return {self.ltp, self.bp}
+end
diff --git a/layer/bias.lua b/layer/bias.lua
index 6ddfe11..8cd326b 100644
--- a/layer/bias.lua
+++ b/layer/bias.lua
@@ -22,3 +22,7 @@ function BiasLayer:propagate(input, output)
     output[1]:copy_fromd(input[1])
     output[1]:add_row(self.bias.trans, 1.0)
 end
+
+function BiasLayer:get_params()
+    return {self.bias}
+end
diff --git a/layer/init.lua b/layer/init.lua
index 38bcd7f..3011f8e 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -58,6 +58,10 @@ function Layer:check_dim_len(len_in, len_out)
     end
 end
 
+function Layer:get_params()
+    nerv.error_method_not_implemented()
+end
+
 function Layer:get_dim()
     return self.dim_in, self.dim_out
 end
diff --git a/layer/sigmoid.lua b/layer/sigmoid.lua
index 220b7af..dd10fb9 100644
--- a/layer/sigmoid.lua
+++ b/layer/sigmoid.lua
@@ -25,3 +25,7 @@ end
 function SigmoidLayer:back_propagate(next_bp_err, bp_err, input, output)
     next_bp_err[1]:sigmoid_grad(bp_err[1], output[1])
 end
+
+function SigmoidLayer:get_params()
+    return {}
+end
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
index cd57010..79e859e 100644
--- a/layer/softmax_ce.lua
+++ b/layer/softmax_ce.lua
@@ -50,3 +50,7 @@ function SoftmaxCELayer:back_propagate(next_bp_err, bp_err, input, output)
     end
     next_bp_err[1]:add(self.soutput, label, 1.0, -1.0)
 end
+
+function SoftmaxCELayer:get_params()
+    return {}
+end
diff --git a/layer/window.lua b/layer/window.lua
index 8e9e761..b381c9b 100644
--- a/layer/window.lua
+++ b/layer/window.lua
@@ -22,3 +22,7 @@ function WindowLayer:propagate(input, output)
     output[1]:copy_fromd(input[1])
     output[1]:scale_row(self.window.trans)
 end
+
+function WindowLayer:get_params()
+    return {self.window}
+end
-- 
cgit v1.2.3-70-g09d2
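
For context: this commit gives every layer a uniform way to expose its trainable parameters. AffineLayer returns its linear transform and bias ({self.ltp, self.bp}), BiasLayer and WindowLayer each return their single parameter, parameter-free layers (SigmoidLayer, SoftmaxCELayer) return an empty table, and the Layer base class raises nerv.error_method_not_implemented(). Below is a minimal sketch of how a caller might aggregate these parameters; the collect_params helper and the layers argument are hypothetical illustrations, not part of this commit or the nerv API.

    -- hypothetical consumer of the new interface: flatten the parameters
    -- of several constructed layer instances into one list, e.g. to hand
    -- them to a checkpoint writer or a parameter updater
    local function collect_params(layers)
        local all_params = {}
        for _, layer in ipairs(layers) do
            -- every layer now implements get_params(); parameter-free
            -- layers contribute nothing since they return {}
            for _, p in ipairs(layer:get_params()) do
                table.insert(all_params, p)
            end
        end
        return all_params
    end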