Diffstat (limited to 'layer/affine.lua')
-rw-r--r--  layer/affine.lua  31
1 file changed, 22 insertions(+), 9 deletions(-)
diff --git a/layer/affine.lua b/layer/affine.lua
index 573b98d..90a1d16 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -12,14 +12,27 @@ function MatrixParam:write(pfhandle)
self.trans:new_to_host():save(pfhandle)
end
-function AffineLayer:__init(id, global_conf, ltp, bp)
+function AffineLayer:__init(id, global_conf, layer_conf)
self.id = id
- self.ltp = ltp
- self.bp = bp
+ self.ltp = layer_conf.ltp
+ self.bp = layer_conf.bp
+ self.dim_in = layer_conf.dim_in
+ self.dim_out = layer_conf.dim_out
self.gconf = global_conf
+ self:check_dim_len(1, 1) -- exactly one input and one output
end
function AffineLayer:init()
+ if self.ltp.trans:ncol() ~= self.bp.trans:ncol() then
+ nerv.error("mismatching dimensions of linear transform and bias paramter")
+ end
+ if self.dim_in[1] ~= self.ltp.trans:nrow() then
+ nerv.error("mismatching dimensions of linear transform parameter and input")
+ end
+ if self.dim_out[1] ~= self.ltp.trans:ncol() then
+ nerv.error("mismatching dimensions of linear transform parameter and output")
+ end
+
-- linear transform correction
self.ltc = self.ltp.trans:create()
self.ltc:fill(0)
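For context, a minimal construction sketch under the new single-table signature (identifiers like gconf, ltp_param, bp_param and the dimension values are hypothetical; only the layer_conf fields read in the hunk above are assumed):

local layer = nerv.AffineLayer("affine0", gconf,
                               {ltp = ltp_param,   -- linear transform parameter
                                bp = bp_param,     -- bias parameter
                                dim_in = {429},    -- must equal ltp.trans:nrow()
                                dim_out = {2048}}) -- must equal ltp.trans:ncol()
layer:init() -- raises nerv.error on any of the three dimension mismatches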
@@ -36,10 +49,10 @@ function nerv.AffineLayer:update(bp_err, input, output)
local gconf = self.gconf
-- momentum gain
local mmt_gain = 1.0 / (1.0 - gconf.momentum);
- local n = input[0]:nrow() * mmt_gain
+ local n = input[1]:nrow() * mmt_gain
-- update corrections (accumulated errors)
- ltc:mul(input[0], bp_err[0], 1.0, gconf.momentum, 'T', 'N')
- bc:add(bc, bp_err[0]:colsum(), gconf.momentum, 1.0)
+ ltc:mul(input[1], bp_err[1], 1.0, gconf.momentum, 'T', 'N')
+ bc:add(bc, bp_err[1]:colsum(), gconf.momentum, 1.0)
-- perform update
ltp:add(ltp, ltc, 1.0, -gconf.lrate / n)
bp:add(bp, bc, 1.0, -gconf.lrate / n)
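Restating the arithmetic of this hunk as a plain-Lua scalar sketch (the values are made up; the real code applies the same rule over matrices via mul/add/colsum):

local lrate, momentum = 0.1, 0.9      -- assumed gconf.lrate / gconf.momentum
local mmt_gain = 1.0 / (1.0 - momentum)
local n = 256 * mmt_gain              -- 256 = rows in the mini-batch
local grad, ltc, ltp = 0.5, 0.0, 1.0
ltc = grad + momentum * ltc           -- accumulate correction with momentum
ltp = ltp + (-lrate / n) * ltc        -- scaled gradient step

The mmt_gain factor in n cancels the growth of the accumulated correction, keeping the effective step size independent of the momentum setting.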
@@ -49,11 +62,11 @@ end
function nerv.AffineLayer:propagate(input, output)
-- apply linear transform
- output[0]:mul(input[0], self.ltp.trans, 1.0, 0.0, 'N', 'N')
+ output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
-- add bias
- output[0]:add_row(self.bp.trans, 1.0)
+ output[1]:add_row(self.bp.trans, 1.0)
end
function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
- next_bp_err[0]:mul(bp_err[0], self.ltp.trans, 1.0, 0.0, 'N', 'T')
+ next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
end
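A call-site sketch for the corrected indexing (the matrix handles x, y, dx, dy are hypothetical; Lua tables index from 1, which is what all three hunks align the layer with):

local input, output = {x}, {y}
layer:propagate(input, output)                  -- output[1] = input[1] * W + b
layer:back_propagate({dx}, {dy}, input, output) -- next_bp_err[1] = bp_err[1] * W^T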