path: root/nerv/layer/affine.lua
author    txh18 <cloudygooseg@gmail.com>    2015-11-24 22:06:45 +0800
committer txh18 <cloudygooseg@gmail.com>    2015-11-24 22:06:45 +0800
commit    8e590ba284bfee414659f1845e175b41cac05d45 (patch)
tree      a812e760e3631263c18144c7c6bb4f7a332732af /nerv/layer/affine.lua
parent    914a026734db6608e04987e9fcec9c82612e8673 (diff)
let affine support multiple inputs
Diffstat (limited to 'nerv/layer/affine.lua')
-rw-r--r--  nerv/layer/affine.lua | 36
1 file changed, 28 insertions(+), 8 deletions(-)
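
This commit lets a single AffineLayer accept several input streams: the first input keeps the existing "ltp" transform, every further input i gets its own "ltp" .. i parameter of shape {dim_in[i], dim_out[1]}, and all inputs share one bias "bp" and one output. A minimal configuration sketch using the constructor signature from the code below; the id "affine0", the gconf table, and the dimension values are made-up examples:

    -- two input streams (429- and 100-dimensional) feeding one 2048-dimensional
    -- output; the layer will look up or create params "ltp", "ltp2" and "bp"
    local affine = nerv.AffineLayer("affine0", gconf,
                                    {dim_in = {429, 100}, dim_out = {2048}})
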
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index e24a0c6..d56fcb8 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -64,25 +64,35 @@ function AffineLayer:__init(id, global_conf, layer_conf)
     self.dim_in = layer_conf.dim_in
     self.dim_out = layer_conf.dim_out
     self.ltp = self:find_param("ltp", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[1], self.dim_out[1]}) --layer_conf.ltp
-    self.bp = self:find_param("bp", layer_conf, global_conf, nerv.BiasParam, {1, self.dim_out[1]})--layer_conf.bp
+    for i = 2, #self.dim_in do
+        self["ltp" .. i] = self:find_param("ltp" .. i, layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[i], self.dim_out[1]})
+    end
+    self.bp = self:find_param("bp", layer_conf, global_conf, nerv.BiasParam, {1, self.dim_out[1]}) --layer_conf.bp
     self.gconf = global_conf
-    self:check_dim_len(1, 1) -- exactly one input and one output
-    -- self.direct_update = layer_conf.direct_update or global_conf.direct_update
+    self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
 end
 
 function AffineLayer:init(batch_size)
     if self.ltp.trans:ncol() ~= self.bp.trans:ncol() then
         nerv.error("mismatching dimensions of linear transform and bias parameter")
     end
+    self.bp:train_init()
     if self.dim_in[1] ~= self.ltp.trans:nrow() then
         nerv.error("mismatching dimensions of linear transform parameter and input")
     end
     if self.dim_out[1] ~= self.ltp.trans:ncol() then
         nerv.error("mismatching dimensions of linear transform parameter and output")
     end
-    self.ltp_grad = self.ltp.trans:create()
     self.ltp:train_init()
-    self.bp:train_init()
+    for i = 2, #self.dim_in do
+        if self.dim_in[i] ~= self["ltp" .. i].trans:nrow() then
+            nerv.error("mismatching dimensions of linear transform parameter and input")
+        end
+        if self.dim_out[1] ~= self["ltp" .. i].trans:ncol() then
+            nerv.error("mismatching dimensions of linear transform parameter and output")
+        end
+        self["ltp" .. i]:train_init()
+    end
 end
 
 function AffineLayer:batch_resize(batch_size)
@@ -91,20 +101,30 @@ end
 
 function AffineLayer:update(bp_err, input, output)
     self.ltp:update_by_err_input(bp_err[1], input[1])
+    for i = 2, #self.dim_in do
+        self["ltp" .. i]:update_by_err_input(bp_err[1], input[i])
+    end
     self.bp:update_by_gradient(bp_err[1]:colsum())
 end
 
 function AffineLayer:propagate(input, output)
-    -- apply linear transform
     output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
-    -- add bias
+    for i = 2, #self.dim_in do
+        output[1]:mul(input[i], self["ltp" .. i].trans, 1.0, 1.0, 'N', 'N')
+    end
     output[1]:add_row(self.bp.trans, 1.0)
 end
 
 function AffineLayer:back_propagate(bp_err, next_bp_err, input, output)
     next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
+    for i = 2, #self.dim_in do
+        next_bp_err[i]:mul(bp_err[1], self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T')
+    end
 end
 
 function AffineLayer:get_params()
-    return nerv.ParamRepo({self.ltp, self.bp})
+    local pr = nerv.ParamRepo({self.ltp, self.bp})
+    for i = 2, #self.dim_in do
+        pr:add(self["ltp" .. i].id, self["ltp" .. i])
+    end
+    return pr
 end
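
In effect the forward pass is now a sum of per-input linear transforms plus the shared bias, and back-propagation routes the same output error through each transform's transpose. As a rough illustration, the propagate loop unrolled for a two-input layer (this is just the diffed body rewritten for i = 2, not additional API):

    -- inside AffineLayer:propagate for dim_in = {d1, d2}:
    -- output[1] = input[1] * ltp + input[2] * ltp2 + bp
    output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')  -- beta = 0.0 overwrites output
    output[1]:mul(input[2], self.ltp2.trans, 1.0, 1.0, 'N', 'N') -- beta = 1.0 accumulates
    output[1]:add_row(self.bp.trans, 1.0)                        -- add the shared bias row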