diff options
Diffstat (limited to 'nerv/layer')
-rw-r--r-- | nerv/layer/init.lua | 6 |
1 file changed, 3 insertions, 3 deletions
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index b8b7ea1..32b82d8 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -72,18 +72,18 @@ end
 function Layer:find_param(pid, l_conf, gconf, p_type, p_dim)
     if l_conf[pid] ~= nil then
-        nerv.info("Param [%s] of layer [%s] found in layer_conf.\n", pid, self.id)
+        nerv.info("Param [%s] of layer [%s] found in layer_conf.", pid, self.id)
         return l_conf[pid]
     end
     local pid_g = self.id .. '_' .. pid --global identifier
     local pr = l_conf.pr
     local p
     if pr ~= nil and pr:has_param(pid_g) == true then
-        nerv.info("Param [%s] of layer [%s] found in layer_conf.paramRepo.\n", pid, self.id)
+        nerv.info("Param [%s] of layer [%s] found in layer_conf.paramRepo.", pid, self.id)
         p = pr:get_param(pid_g)
         return p
     end
-    nerv.info("Param [%s] of layer [%s] is not found in layer_conf or layer_conf.paramRepo, switch to auto-generate.\n", pid, self.id)
+    nerv.info("Param [%s] of layer [%s] is not found in layer_conf or layer_conf.paramRepo, switch to auto-generate.", pid, self.id)
     p = p_type(pid_g, gconf)
     p.trans = gconf.cumat_type(unpack(p_dim))
     p.trans:generate(gconf.param_random)