diff options
author | Determinant <[email protected]> | 2015-05-26 19:17:46 +0800 |
---|---|---|
committer | Determinant <[email protected]> | 2015-05-26 19:17:46 +0800 |
commit | 910640c0ef7c43d586180241f79723973e0e7d35 (patch) | |
tree | 0763379bba37c058b3a1504a6c4305d2afb7217f | |
parent | c7e170d4f3cc9f73380380a96318cbea437d48ba (diff) |
add affine layer implementation
-rw-r--r-- | layer/affine.lua | 17 |
1 files changed, 12 insertions, 5 deletions
--- Gradient-descent update of the affine layer's parameters.
-- Accumulates momentum-smoothed corrections from the back-propagated error,
-- then applies them to the linear transform (ltp) and bias (bp), followed by
-- L2 weight decay on the linear transform.
-- @param bp_err matrix of errors back-propagated from the next layer
--        (one row per sample in the minibatch)
-- @param input  matrix of this layer's forward-pass input (same row count)
-- @param output forward-pass output (unused here; kept for the layer API)
function nerv.AffineLayer:update(bp_err, input, output)
    -- NOTE(review): `gconf` is read as a global/upvalue here; presumably it is
    -- the same table as the `global_conf` passed to __init — confirm, and
    -- prefer `self.gconf` if __init stores it.
    -- momentum gain: rescales the effective batch size so the overall step
    -- size stays invariant to the momentum coefficient
    local mmt_gain = 1.0 / (1.0 - gconf.momentum)
    -- was `input.nrow()`: a dot-call drops the receiver; `:` passes the matrix
    local n = input:nrow() * mmt_gain
    -- accumulate corrections: ltc = input^T * bp_err + momentum * ltc
    -- (corrections live on self — see `self.bc:fill(0)` in __init; the bare
    -- globals `ltc`/`bc`/`ltp`/`bp` used before were never defined)
    self.ltc:mul(input, bp_err, 1.0, gconf.momentum, 'T', 'N')
    self.bc:add(self.bc, bp_err:colsum(), gconf.momentum, 1.0)
    -- apply the accumulated corrections to the parameters
    -- (was `ltp:add(lpc, ...)` — `lpc` is defined nowhere; typo for `ltp`)
    self.ltp:add(self.ltp, self.ltc, 1.0, -gconf.lrate / n)
    self.bp:add(self.bp, self.bc, 1.0, -gconf.lrate / n)
    -- L2 weight decay on the linear transform only (bias is not decayed)
    self.ltp:add(self.ltp, self.ltp, 1.0, -gconf.lrate * gconf.wcost)
end