author     Determinant <[email protected]>    2015-06-05 17:53:05 +0800
committer  Determinant <[email protected]>    2015-06-05 17:53:05 +0800
commit     37af4bed9c3680fdb9db569605f15013e9b6b64d (patch)
tree       5f870d23f241edbc670c2778c955f6bd9d5eb1d5 /layer
parent     eba6049a82455499c68ee875843b6f44d6164fa5 (diff)
add get_params to all layers
Diffstat (limited to 'layer')
-rw-r--r--  layer/affine.lua      | 10
-rw-r--r--  layer/bias.lua        |  4
-rw-r--r--  layer/init.lua        |  4
-rw-r--r--  layer/sigmoid.lua     |  4
-rw-r--r--  layer/softmax_ce.lua  |  4
-rw-r--r--  layer/window.lua      |  4
6 files changed, 27 insertions(+), 3 deletions(-)
diff --git a/layer/affine.lua b/layer/affine.lua
index 59a0e91..2cd7acb 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -41,7 +41,7 @@ function AffineLayer:init()
     self.bc:fill(0)
 end
 
-function nerv.AffineLayer:update(bp_err, input, output)
+function AffineLayer:update(bp_err, input, output)
     local ltp = self.ltp.trans
     local bp = self.bp.trans
     local ltc = self.ltc
@@ -60,13 +60,17 @@ function nerv.AffineLayer:update(bp_err, input, output)
     ltp:add(ltp, ltp, 1.0, -gconf.lrate * gconf.wcost)
 end
 
-function nerv.AffineLayer:propagate(input, output)
+function AffineLayer:propagate(input, output)
     -- apply linear transform
     output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
     -- add bias
     output[1]:add_row(self.bp.trans, 1.0)
 end
 
-function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
+function AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
     next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
 end
+
+function AffineLayer:get_params()
+    return {self.ltp, self.bp}
+end
diff --git a/layer/bias.lua b/layer/bias.lua
index 6ddfe11..8cd326b 100644
--- a/layer/bias.lua
+++ b/layer/bias.lua
@@ -22,3 +22,7 @@ function BiasLayer:propagate(input, output)
     output[1]:copy_fromd(input[1])
     output[1]:add_row(self.bias.trans, 1.0)
 end
+
+function BiasLayer:get_params()
+    return {self.bias}
+end
diff --git a/layer/init.lua b/layer/init.lua
index 38bcd7f..3011f8e 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -58,6 +58,10 @@ function Layer:check_dim_len(len_in, len_out)
     end
 end
 
+function Layer:get_params()
+    nerv.error_method_not_implemented()
+end
+
 function Layer:get_dim()
     return self.dim_in, self.dim_out
 end
diff --git a/layer/sigmoid.lua b/layer/sigmoid.lua
index 220b7af..dd10fb9 100644
--- a/layer/sigmoid.lua
+++ b/layer/sigmoid.lua
@@ -25,3 +25,7 @@ end
 function SigmoidLayer:back_propagate(next_bp_err, bp_err, input, output)
     next_bp_err[1]:sigmoid_grad(bp_err[1], output[1])
 end
+
+function SigmoidLayer:get_params()
+    return {}
+end
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
index cd57010..79e859e 100644
--- a/layer/softmax_ce.lua
+++ b/layer/softmax_ce.lua
@@ -50,3 +50,7 @@ function SoftmaxCELayer:back_propagate(next_bp_err, bp_err, input, output)
     end
     next_bp_err[1]:add(self.soutput, label, 1.0, -1.0)
 end
+
+function SoftmaxCELayer:get_params()
+    return {}
+end
diff --git a/layer/window.lua b/layer/window.lua
index 8e9e761..b381c9b 100644
--- a/layer/window.lua
+++ b/layer/window.lua
@@ -22,3 +22,7 @@ function WindowLayer:propagate(input, output)
     output[1]:copy_fromd(input[1])
     output[1]:scale_row(self.window.trans)
 end
+
+function WindowLayer:get_params()
+    return {self.window}
+end
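
With this change every layer answers get_params uniformly: layers that own Param objects (affine, bias, window) return them in an array, parameter-free layers (sigmoid, softmax_ce) return an empty table, and the base class raises nerv.error_method_not_implemented() so a new layer cannot silently omit the method. Below is a minimal sketch, not part of this commit, of how a caller might gather all parameters of a network; the collect_params helper and the layers array are hypothetical names.

-- Hypothetical helper: walk an array of constructed layer instances
-- and gather every Param they report via get_params.
function collect_params(layers)
    local params = {}
    for i = 1, #layers do
        -- each layer returns an array of its Param objects
        -- (possibly empty, e.g. SigmoidLayer, SoftmaxCELayer)
        for _, p in ipairs(layers[i]:get_params()) do
            table.insert(params, p)
        end
    end
    return params
end

Because even parameter-free layers return an empty table rather than falling through to the base-class error, a loop like this needs no per-layer special cases.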