author     txh18 <cloudygooseg@gmail.com>  2015-11-23 16:00:57 +0800
committer  txh18 <cloudygooseg@gmail.com>  2015-11-23 16:00:57 +0800
commit     884cb00dd3aca43fbe6d5b72cfc43264cccb8a86 (patch)
tree       4f06c3a55152a62ad1580603db53e611a6762ec4 /nerv/layer/init.lua
parent     e76ae9b12651ed8497537edf357f4cf90421ea0d (diff)
parent     e10e9dc4a517d2adc84dbe46c6ccded9043a81e4 (diff)
small bug fix
Diffstat (limited to 'nerv/layer/init.lua')
-rw-r--r--  nerv/layer/init.lua  2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 67ebe1e..c6d0a98 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -86,7 +86,7 @@ function Layer:find_param(pid, l_conf, gconf, p_type, p_dim)
     nerv.printf("Param [%s] of layer [%s] is not found in layer_conf or paramRepo, switch to auto-generate.\n", pid, self.id)
     p = p_type(pid_g, gconf)
     p.trans = gconf.cumat_type(unpack(p_dim))
-    p.trans:generate(global_conf.param_random)
+    p.trans:generate(gconf.param_random)
     pr:add(pid_g, p) --add the parameter into the paramRepo
     return p
 end
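Why the one-token change matters: find_param receives the active configuration as its gconf argument, but the old line indexed a global named global_conf instead. Unless the calling script happens to define such a global, the auto-generation path fails with an index-on-nil error; if it does define one, the parameter is silently initialized from the wrong configuration. A minimal standalone Lua sketch of the failure mode follows (the names find_param_old/find_param_new and the param_random value are illustrative, not NERV's API):

    -- Stand-in for p.trans:generate(...): just report which initializer it got.
    local function generate(param_random)
        print("generate called with param_random =", param_random())
    end

    local function find_param_old(gconf)
        -- Old code: indexes the global `global_conf`, ignoring the argument.
        -- `global_conf` is nil here, so this raises
        -- "attempt to index ... 'global_conf'".
        generate(global_conf.param_random)
    end

    local function find_param_new(gconf)
        -- Fixed code: uses the configuration actually passed in.
        generate(gconf.param_random)
    end

    -- Hypothetical config table; NERV configs supply param_random as a
    -- function returning small random initial values.
    local gconf = {param_random = function() return math.random() / 10 - 0.05 end}
    find_param_new(gconf)                -- ok: prints an initializer from gconf
    print(pcall(find_param_old, gconf))  -- false, index-on-nil error message

The fix keeps auto-generated parameters consistent with whatever configuration the caller passed to find_param, rather than coupling the layer code to a global variable.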