 nerv/layer/affine.lua | 7 ++++---
 nerv/layer/init.lua   | 2 +-
 2 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index ed58d38..e24a0c6 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -21,7 +21,7 @@ function MatrixParam:_update_by_gradient(gradient, alpha, beta)
     local gconf = self.gconf
     -- momentum gain
     local mmt_gain = 1.0 / (1.0 - gconf.momentum)
-    local n = self.gconf.batch_size * mmt_gain
+    local n = gconf.batch_size * mmt_gain
     -- perform update
     if gconf.momentum > 0 then
         self.correction:add(self.correction, gradient, gconf.momentum, 1.0)
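
Both this hunk and the next are the same cleanup: self.gconf is already bound to the local gconf at the top of the function, so the leftover self.gconf.batch_size lookup was redundant. The arithmetic around it is the usual momentum normalization: with momentum m the running correction accumulates gradients with geometric weights, so its steady-state magnitude is roughly 1/(1 - m) times a single gradient, and dividing the learning rate by n = batch_size * 1/(1 - m) compensates for both the batch size and that gain. A minimal scalar sketch, with hypothetical config values and plain numbers standing in for nerv's matrices:

    -- scalar sketch of the update above; config values are hypothetical
    local gconf = {momentum = 0.9, lrate = 0.1, batch_size = 256}
    local mmt_gain = 1.0 / (1.0 - gconf.momentum)  -- 10
    local n = gconf.batch_size * mmt_gain          -- 2560
    -- correction:add(correction, gradient, momentum, 1.0) computes
    -- correction = momentum * correction + gradient
    local correction, gradient = 0.0, 0.5
    correction = gconf.momentum * correction + gradient
    print(n, correction)                           -- 2560  0.5
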
@@ -35,7 +35,7 @@ function MatrixParam:_update_by_err_input(err, input, alpha, beta)
     local gconf = self.gconf
     -- momentum gain
     local mmt_gain = 1.0 / (1.0 - gconf.momentum)
-    local n = self.gconf.batch_size * mmt_gain
+    local n = gconf.batch_size * mmt_gain
     -- perform update
     if gconf.momentum > 0 then
         self.correction:mul(input, err, 1.0, gconf.momentum, 'T', 'N')
@@ -54,7 +54,8 @@ function MatrixParam:update_by_err_input(err, input)
 end
 
 function LinearTransParam:update_by_err_input(err, input)
-    local l2 = 1 - self.gconf.lrate * self.gconf.wcost
+    local gconf = self.gconf
+    local l2 = 1 - gconf.lrate * gconf.wcost
     self:_update_by_err_input(err, input, l2, l2)
 end
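
The LinearTransParam hunk is the same gconf cleanup plus one added line. The factor it computes, l2 = 1 - lrate * wcost, is the standard L2 weight-decay multiplier: shrinking the old weights by it each step is, to first order in lrate, what adding the penalty (wcost / 2) * ||W||^2 to the loss would do. It is passed as both alpha (the scale on the old weights) and beta (the scale on the gradient term), so the decay factor also slightly scales the gradient step. A scalar sketch with hypothetical values:

    -- scalar sketch of the decay factor; values are hypothetical
    local gconf = {lrate = 0.1, wcost = 1e-3}
    local l2 = 1 - gconf.lrate * gconf.wcost  -- 0.9999
    local w, step = 2.0, 0.05                 -- weight and gradient step
    w = l2 * w - l2 * step                    -- alpha = beta = l2
    print(w)                                  -- 1.949805
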
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 67ebe1e..c6d0a98 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -86,7 +86,7 @@ function Layer:find_param(pid, l_conf, gconf, p_type, p_dim)
     nerv.printf("Param [%s] of layer [%s] is not found in layer_conf or paramRepo, switch to auto-generate.\n", pid, self.id)
     p = p_type(pid_g, gconf)
     p.trans = gconf.cumat_type(unpack(p_dim))
-    p.trans:generate(global_conf.param_random)
+    p.trans:generate(gconf.param_random)
     pr:add(pid_g, p) --add the parameter into the paramRepo
     return p
 end
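
The init.lua hunk is the actual bug fix: find_param receives the configuration as its gconf argument, but the auto-generation branch read global_conf, a name never defined in this scope. An undefined global in Lua evaluates to nil, so global_conf.param_random raised an attempt-to-index-nil error whenever a parameter had to be auto-generated, unless a training script happened to define a global by that name. An illustrative reproduction outside nerv (function names are hypothetical):

    -- illustrative only; not nerv code
    local function generate_old(gconf)
        return global_conf.param_random  -- global_conf is an undefined global: nil
    end
    local function generate_new(gconf)
        return gconf.param_random        -- reads the argument actually in scope
    end
    print(generate_new({param_random = 0.1}))  -- 0.1
    print(pcall(generate_old, {param_random = 0.1}))
    -- false   attempt to index nil (exact message varies by Lua version)
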