From f8543464c13584e39bfacee694ee1ed80ac121f4 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Tue, 26 May 2015 23:58:32 +0800
Subject: fix a severe bug in memory management of userdata

---
 layer/affine.lua | 22 +++++++++++++++-------
 layer/init.lua   |  4 ++++
 2 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/layer/affine.lua b/layer/affine.lua
index cd2ba0b..221aacd 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -14,23 +14,31 @@ function AffineLayer:__init(id, global_conf, ltp, bp)
     self.ltp = ltp
     self.bp = bp
     self.gconf = global_conf
+end
+
+function AffineLayer:init()
     -- linear transform correction
-    self.ltc = ltp:create()
+    self.ltc = self.ltp.trans:create()
     self.ltc:fill(0)
     -- bias correction
-    self.bc = bp:create()
+    self.bc = self.bp.trans:create()
     self.bc:fill(0)
 end
 
 function nerv.AffineLayer:update(bp_err, input, output)
+    local ltp = self.ltp.trans
+    local bp = self.bp.trans
+    local ltc = self.ltc
+    local bc = self.bc
+    local gconf = self.gconf
     -- momentum gain
     local mmt_gain = 1.0 / (1.0 - gconf.momentum);
-    local n = input.nrow() * mmt_gain
+    local n = input:nrow() * mmt_gain
     -- update corrections (accumulated errors)
     ltc:mul(input, bp_err, 1.0, gconf.momentum, 'T', 'N')
     bc:add(bc, bp_err:colsum(), gconf.momentum, 1.0)
     -- perform update
-    ltp:add(lpc, ltc, 1.0, -gconf.lrate / n)
+    ltp:add(ltp, ltc, 1.0, -gconf.lrate / n)
     bp:add(bp, bc, 1.0, -gconf.lrate / n)
     -- weight decay
     ltp:add(ltp, ltp, 1.0, -gconf.lrate * gconf.wcost)
@@ -38,11 +46,11 @@ end
 
 function nerv.AffineLayer:propagate(input, output)
     -- apply linear transform
-    output:mul(input, self.ltp, 'N', 'N')
+    output:mul(input, self.ltp.trans, 1.0, 0.0, 'N', 'N')
     -- add bias
-    output:add_row(self.bp, 1.0)
+    output:add_row(self.bp.trans, 1.0)
 end
 
 function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
-    next_bp_err:mul(bp_err, self.ltp, 'N', 'T')
+    next_bp_err:mul(bp_err, self.ltp.trans, 1.0, 0.0, 'N', 'T')
 end
diff --git a/layer/init.lua b/layer/init.lua
index 6923dbd..0f0afe8 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -6,6 +6,10 @@ function nerv.Param:__init(id)
     self.id = id
 end
 
+function nerv.Param:init(id)
+    nerv.error_method_not_implemented()
+end
+
 function nerv.Param:get_info()
     return self.info
 end
--
cgit v1.2.3-70-g09d2
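
Note on the fix: before this commit, AffineLayer:__init() treated ltp and bp as raw matrix userdata and allocated the correction matrices from them inside the constructor; after it, ltp and bp are nerv.Param objects whose matrix lives in the .trans field (so the param object keeps the userdata alive), and buffer allocation is deferred to a separate init() step. A minimal construction sketch under that reading, assuming callable class constructors; the matrix type (CuMatrixFloat) and the dimensions are illustrative assumptions, not part of this patch:

    local gconf = {lrate = 0.8, momentum = 0.9, wcost = 1e-6}

    -- params are nerv.Param objects; the matrix userdata lives in .trans
    local ltp = nerv.Param("affine_ltp")          -- __init(id), per layer/init.lua
    ltp.trans = nerv.CuMatrixFloat(429, 2048)     -- assumed matrix type and shape
    local bp = nerv.Param("affine_bp")
    bp.trans = nerv.CuMatrixFloat(1, 2048)

    local affine = nerv.AffineLayer("affine0", gconf, ltp, bp)
    affine:init()  -- ltc and bc are allocated here now, not in __init()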
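
For reference, the updated nerv.AffineLayer:update() still implements minibatch SGD with momentum and weight decay; with n = input:nrow() / (1 - momentum), its calls amount to:

    ltc <- input^T * bp_err + momentum * ltc      -- accumulate momentum-smoothed gradient
    bc  <- colsum(bp_err)  + momentum * bc
    ltp <- ltp - (lrate / n) * ltc                -- apply the update
    bp  <- bp  - (lrate / n) * bc
    ltp <- ltp - (lrate * wcost) * ltp            -- weight decay on the weights only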