author     cloudygoose <[email protected]>  2015-06-05 21:40:45 +0800
committer  cloudygoose <[email protected]>  2015-06-05 21:40:45 +0800
commit     5b4cc22736ade93f4d8348513c4a35f6a9f9be04 (patch)
tree       255fbddedcdb25b88f4a70268cb6b1ffbaa5afed /layer
parent     90f2b7c257c286e6c52432ed43807f332d97cc7e (diff)
parent     37af4bed9c3680fdb9db569605f15013e9b6b64d (diff)
Merge remote-tracking branch 'upstream/master'
Diffstat (limited to 'layer')
-rw-r--r--  layer/affine.lua      10
-rw-r--r--  layer/bias.lua         4
-rw-r--r--  layer/init.lua         4
-rw-r--r--  layer/sigmoid.lua      4
-rw-r--r--  layer/softmax_ce.lua   4
-rw-r--r--  layer/window.lua       4
6 files changed, 27 insertions, 3 deletions
diff --git a/layer/affine.lua b/layer/affine.lua
index 59a0e91..2cd7acb 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -41,7 +41,7 @@ function AffineLayer:init()
self.bc:fill(0)
end
-function nerv.AffineLayer:update(bp_err, input, output)
+function AffineLayer:update(bp_err, input, output)
local ltp = self.ltp.trans
local bp = self.bp.trans
local ltc = self.ltc
@@ -60,13 +60,17 @@ function nerv.AffineLayer:update(bp_err, input, output)
ltp:add(ltp, ltp, 1.0, -gconf.lrate * gconf.wcost)
end
-function nerv.AffineLayer:propagate(input, output)
+function AffineLayer:propagate(input, output)
-- apply linear transform
output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
-- add bias
output[1]:add_row(self.bp.trans, 1.0)
end
-function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
+function AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
end
+
+function AffineLayer:get_params()
+ return {self.ltp, self.bp}
+end
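AffineLayer is the only layer in this diff with trainable state, so its get_params() returns both parameter objects: the linear transform self.ltp and the bias self.bp. In propagate, output = input * W + b (the 'N','N' flags request untransposed operands), and back_propagate multiplies the error by the transpose: next_bp_err = bp_err * W^T. The weight-decay line ltp:add(ltp, ltp, 1.0, -gconf.lrate * gconf.wcost) reads as ltp <- (1 - lrate*wcost) * ltp, assuming add(a, b, alpha, beta) computes alpha*a + beta*b, the same reading the softmax_ce hunk below relies on. A self-contained sketch of the forward arithmetic on plain Lua tables, for readers without the nerv matrix types (names hypothetical):

    -- Y = X * W + b, with X of size n x d_in, W of size d_in x d_out,
    -- and b a length-d_out bias (all plain Lua tables).
    local function affine_forward(X, W, b)
        local Y = {}
        for i = 1, #X do
            Y[i] = {}
            for j = 1, #b do
                local s = b[j]
                for k = 1, #W do
                    s = s + X[i][k] * W[k][j]
                end
                Y[i][j] = s
            end
        end
        return Y
    end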
diff --git a/layer/bias.lua b/layer/bias.lua
index 6ddfe11..8cd326b 100644
--- a/layer/bias.lua
+++ b/layer/bias.lua
@@ -22,3 +22,7 @@ function BiasLayer:propagate(input, output)
output[1]:copy_fromd(input[1])
output[1]:add_row(self.bias.trans, 1.0)
end
+
+function BiasLayer:get_params()
+ return {self.bias}
+end
diff --git a/layer/init.lua b/layer/init.lua
index 38bcd7f..3011f8e 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -58,6 +58,10 @@ function Layer:check_dim_len(len_in, len_out)
end
end
+function Layer:get_params()
+ nerv.error_method_not_implemented()
+end
+
function Layer:get_dim()
return self.dim_in, self.dim_out
end
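In the base class, get_params() is declared abstract: calling nerv.error_method_not_implemented() makes a layer that forgets to override it fail loudly instead of silently contributing no parameters. The same pattern in plain Lua metatables, outside the nerv class system (all names hypothetical):

    local Layer = {}
    Layer.__index = Layer

    -- Abstract: every concrete layer must override this.
    function Layer:get_params()
        error("method not implemented")
    end

    -- A concrete subclass that owns one parameter object.
    local BiasLayer = setmetatable({}, {__index = Layer})
    BiasLayer.__index = BiasLayer

    function BiasLayer.new(bias)
        return setmetatable({bias = bias}, BiasLayer)
    end

    function BiasLayer:get_params()
        return {self.bias}
    end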
diff --git a/layer/sigmoid.lua b/layer/sigmoid.lua
index 220b7af..dd10fb9 100644
--- a/layer/sigmoid.lua
+++ b/layer/sigmoid.lua
@@ -25,3 +25,7 @@ end
function SigmoidLayer:back_propagate(next_bp_err, bp_err, input, output)
next_bp_err[1]:sigmoid_grad(bp_err[1], output[1])
end
+
+function SigmoidLayer:get_params()
+ return {}
+end
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
index cd57010..79e859e 100644
--- a/layer/softmax_ce.lua
+++ b/layer/softmax_ce.lua
@@ -50,3 +50,7 @@ function SoftmaxCELayer:back_propagate(next_bp_err, bp_err, input, output)
end
next_bp_err[1]:add(self.soutput, label, 1.0, -1.0)
end
+
+function SoftmaxCELayer:get_params()
+ return {}
+end
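SigmoidLayer and SoftmaxCELayer own no weights: they transform activations (and, for softmax_ce, compute the loss) but have nothing to train, so both return an empty table. Returning {} rather than nil keeps aggregation uniform, since ipairs over an empty table simply does nothing:

    local params = {}
    for _, p in ipairs({}) do     -- a parameter-free layer's get_params()
        table.insert(params, p)   -- never runs
    end
    assert(#params == 0)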
diff --git a/layer/window.lua b/layer/window.lua
index 8e9e761..b381c9b 100644
--- a/layer/window.lua
+++ b/layer/window.lua
@@ -22,3 +22,7 @@ function WindowLayer:propagate(input, output)
output[1]:copy_fromd(input[1])
output[1]:scale_row(self.window.trans)
end
+
+function WindowLayer:get_params()
+ return {self.window}
+end
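WindowLayer mirrors BiasLayer: where bias adds a row vector to every row of the input, window multiplies by one, and its get_params() likewise exposes the single parameter object. Assuming scale_row(w) scales column j of the matrix by w[j], by analogy with how add_row adds the bias row above, the operation on plain Lua tables would be (helper name hypothetical):

    -- M[i][j] = M[i][j] * w[j]: scale every row element-wise by w.
    local function scale_row(M, w)
        for i = 1, #M do
            for j = 1, #w do
                M[i][j] = M[i][j] * w[j]
            end
        end
    end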