author     Determinant <[email protected]>    2015-05-26 18:56:29 +0800
committer  Determinant <[email protected]>    2015-05-26 18:56:29 +0800
commit     c7e170d4f3cc9f73380380a96318cbea437d48ba
tree       41abed014da976a4d27a1cbdcfa3171d45657eac /layer/affine.lua
parent     fd389b72623dcb44009076c3819a74a79b6f94be
add fill for cumatrix
Diffstat (limited to 'layer/affine.lua')
-rw-r--r--  layer/affine.lua  20
1 file changed, 18 insertions(+), 2 deletions(-)
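The `fill` method named in the commit message is what lets `__init` in the diff below zero-initialize the new correction matrices. A minimal usage sketch, assuming NERV's `nerv.CuMatrixFloat` constructor (the constructor name is taken from the NERV cumatrix API, not from this diff):

    -- allocate a 2x3 CUDA float matrix and set every entry to a scalar
    local m = nerv.CuMatrixFloat(2, 3)
    m:fill(0)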
diff --git a/layer/affine.lua b/layer/affine.lua
index 5f1b4ce..67c5854 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -10,16 +10,32 @@ function LinearTransParam:write(pfhandle)
     self.trans:new_to_host():save(pfhandle)
 end
 
-function AffineLayer:__init(id, ltp, bp)
+function AffineLayer:__init(id, global_conf, ltp, bp)
     self.ltp = ltp
     self.bp = bp
+    self.gconf = global_conf
+    -- linear transform correction
+    self.ltc = ltp:create()
+    self.ltc:fill(0)
+    -- bias correction
+    self.bc = bp:create()
+    self.bc:fill(0)
 end
 
 function nerv.AffineLayer:update(input, output)
+    -- momentum gain --
+    mmt_gain = 1.0 / (1.0 - gconf.momentum);
+    n = input.nrow() * mmt_gain
+-- ltc =
 end
 
 function nerv.AffineLayer:propagate(input, output)
+    -- apply linear transform
+    output:mul(input, self.ltp, 'N', 'N')
+    -- add bias
+    output:add_row(self.bp, 1.0)
 end
 
-function nerv.AffineLayer:back_propagate(input, output)
+function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
+    next_bp_err:mul(bp_err, self.ltp, 'N', 'T')
 end
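In this commit `update` is still a stub: the correction step (`-- ltc =`) is commented out, and the two lines that are present reference a global `gconf` instead of `self.gconf` and call `input.nrow()` with a dot instead of a colon, both of which would fail once the function is actually exercised. A minimal sketch of how the momentum update could be completed, assuming a gemm-style `mul` with alpha/beta scaling, an axpy-style `add`, a `bp_err` argument, and a `gconf.lrate` field, none of which appear in this diff:

    -- Sketch only, not the committed code: the scaled mul/add signatures,
    -- the bp_err argument, and gconf.lrate are assumptions.
    function nerv.AffineLayer:update(bp_err, input, output)
        local gconf = self.gconf
        -- momentum gain rescales the effective learning rate so the
        -- long-run step size is independent of the momentum coefficient
        local mmt_gain = 1.0 / (1.0 - gconf.momentum)
        local n = input:nrow() * mmt_gain   -- ':' method call, not '.'
        -- accumulate the gradient into the correction term:
        -- ltc <- input^T * bp_err + momentum * ltc
        -- (assumed BLAS-gemm-like signature with alpha/beta scaling)
        self.ltc:mul(input, bp_err, 1.0, gconf.momentum, 'T', 'N')
        -- apply the scaled correction: ltp <- ltp - (lrate / n) * ltc
        -- (assumed axpy-style add; lrate is a hypothetical gconf field)
        self.ltp:add(self.ltp, self.ltc, 1.0, -gconf.lrate / n)
    end

The shapes line up with the forward pass shown in `propagate`: with input of size n-by-in_dim and bp_err of size n-by-out_dim, the 'T','N' product is in_dim-by-out_dim, matching `self.ltp` as used in `output:mul(input, self.ltp, 'N', 'N')`.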