about summary refs log tree commit diff
path: root/nerv/layer/init.lua
diff options
context:
space:
mode:
Diffstat (limited to 'nerv/layer/init.lua')
-rw-r--r--  nerv/layer/init.lua  2
1 file changed, 1 insertion, 1 deletion
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 67ebe1e..c6d0a98 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -86,7 +86,7 @@ function Layer:find_param(pid, l_conf, gconf, p_type, p_dim)
nerv.printf("Param [%s] of layer [%s] is not found in layer_conf or paramRepo, switch to auto-generate.\n", pid, self.id)
p = p_type(pid_g, gconf)
p.trans = gconf.cumat_type(unpack(p_dim))
- p.trans:generate(global_conf.param_random)
+ p.trans:generate(gconf.param_random)
pr:add(pid_g, p) --add the parameter into the paramRepo
return p
end