author     txh18 <[email protected]>   2015-11-23 16:00:57 +0800
committer  txh18 <[email protected]>   2015-11-23 16:00:57 +0800
commit     884cb00dd3aca43fbe6d5b72cfc43264cccb8a86 (patch)
tree       4f06c3a55152a62ad1580603db53e611a6762ec4
parent     e76ae9b12651ed8497537edf357f4cf90421ea0d (diff)
parent     e10e9dc4a517d2adc84dbe46c6ccded9043a81e4 (diff)
small bug fix
-rw-r--r--  nerv/layer/affine.lua | 7
-rw-r--r--  nerv/layer/init.lua   | 2
2 files changed, 5 insertions, 4 deletions
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index ed58d38..e24a0c6 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -21,7 +21,7 @@ function MatrixParam:_update_by_gradient(gradient, alpha, beta)
     local gconf = self.gconf
     -- momentum gain
     local mmt_gain = 1.0 / (1.0 - gconf.momentum)
-    local n = self.gconf.batch_size * mmt_gain
+    local n = gconf.batch_size * mmt_gain
     -- perform update
     if gconf.momentum > 0 then
         self.correction:add(self.correction, gradient, gconf.momentum, 1.0)
@@ -35,7 +35,7 @@ function MatrixParam:_update_by_err_input(err, input, alpha, beta)
     local gconf = self.gconf
     -- momentum gain
     local mmt_gain = 1.0 / (1.0 - gconf.momentum)
-    local n = self.gconf.batch_size * mmt_gain
+    local n = gconf.batch_size * mmt_gain
     -- perform update
     if gconf.momentum > 0 then
         self.correction:mul(input, err, 1.0, gconf.momentum, 'T', 'N')
@@ -54,7 +54,8 @@ function MatrixParam:update_by_err_input(err, input)
 end

 function LinearTransParam:update_by_err_input(err, input)
-    local l2 = 1 - self.gconf.lrate * self.gconf.wcost
+    local gconf = self.gconf
+    local l2 = 1 - gconf.lrate * gconf.wcost
     self:_update_by_err_input(err, input, l2, l2)
 end

diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 67ebe1e..c6d0a98 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -86,7 +86,7 @@ function Layer:find_param(pid, l_conf, gconf, p_type, p_dim)
     nerv.printf("Param [%s] of layer [%s] is not found in layer_conf or paramRepo, switch to auto-generate.\n", pid, self.id)
     p = p_type(pid_g, gconf)
     p.trans = gconf.cumat_type(unpack(p_dim))
-    p.trans:generate(global_conf.param_random)
+    p.trans:generate(gconf.param_random)
     pr:add(pid_g, p) --add the parameter into the paramRepo
     return p
 end
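The affine.lua hunks are a cleanup: the functions already cache `self.gconf` in a `gconf` local, so the remaining `self.gconf.*` lookups are replaced with the local. The update they touch is momentum SGD with L2 weight decay folded into the step. The following is a minimal plain-Lua sketch of the same arithmetic on a single scalar weight; the names `w`, `correction`, and `update` are illustrative and not nerv's CUDA matrix API, and the last line assumes, as the visible `self:_update_by_err_input(err, input, l2, l2)` call suggests, that both the old weight and the step are scaled by `l2`.

-- Scalar sketch of the update rule touched above (illustrative names;
-- nerv performs these steps on CUDA matrices).
local gconf = {lrate = 0.1, wcost = 1e-6, momentum = 0.9, batch_size = 256}

local w = 0.5           -- one weight of the linear transform
local correction = 0.0  -- momentum buffer, like self.correction

local function update(grad)
    -- momentum gain: 1/(1 - m) is the limiting sum of m^k, so dividing
    -- the step by n = batch_size * mmt_gain keeps the effective step
    -- size comparable whether or not momentum is enabled
    local mmt_gain = 1.0 / (1.0 - gconf.momentum)
    local n = gconf.batch_size * mmt_gain
    if gconf.momentum > 0 then
        -- mirrors self.correction:add(self.correction, gradient, m, 1.0)
        correction = gconf.momentum * correction + grad
        grad = correction
    end
    -- l2 = 1 - lrate * wcost shrinks the old weight slightly each step:
    -- L2 weight decay folded into the update (alpha = beta = l2 above)
    local l2 = 1 - gconf.lrate * gconf.wcost
    w = l2 * w - (gconf.lrate / n) * l2 * grad
end

update(0.02)
print(w)

Expressing the decay as a multiplicative `l2` factor avoids materialising a separate `wcost * w` gradient term on the GPU.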
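The init.lua hunk is the behavioural fix: `Layer:find_param` receives the configuration table as its `gconf` argument, but the auto-generation branch read `global_conf.param_random`, which only works when a global with that exact name happens to be defined at call time. Below is a hypothetical, much-simplified sketch of the lookup-then-generate pattern, with nerv's CUDA-matrix construction elided and a plain table `repo` standing in for the ParamRepo; as in nerv configs, `param_random` is assumed to be a function returning one random value.

-- Simplified rendering of Layer:find_param's fallback order
-- (hypothetical names; the real function also takes p_type/p_dim).
local function find_param(pid, l_conf, gconf, repo)
    if l_conf[pid] ~= nil then
        return l_conf[pid]        -- explicit per-layer parameter wins
    end
    if repo[pid] ~= nil then
        return repo[pid]          -- otherwise reuse one from the repo
    end
    -- auto-generate: after the fix, the random initialiser comes from
    -- the gconf argument rather than a global named global_conf
    local p = {value = gconf.param_random()}
    repo[pid] = p                 -- register it, like pr:add(pid_g, p)
    return p
end

local repo = {}
local gconf = {param_random = function() return math.random() - 0.5 end}
print(find_param("ltp", {}, gconf, repo).value)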