Diffstat (limited to 'nerv/layer/init.lua')
-rw-r--r--  nerv/layer/init.lua | 23 +++++++++++++++++++++++
1 file changed, 23 insertions(+), 0 deletions(-)
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 6861b0e..23606e1 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -70,8 +70,29 @@ function Layer:get_dim()
     return self.dim_in, self.dim_out
 end
 
+function Layer:find_param(pid, l_conf, gconf, p_type, p_dim)
+    if l_conf[pid] ~= nil then
+        nerv.info("Param [%s] of layer [%s] found in layer_conf.", pid, self.id)
+        return l_conf[pid]
+    end
+    local pid_g = self.id .. '_' .. pid --global identifier
+    local pr = l_conf.pr
+    local p
+    if pr ~= nil and pr:has_param(pid_g) == true then
+        nerv.info("Param [%s] of layer [%s] found in layer_conf.paramRepo.", pid, self.id)
+        p = pr:get_param(pid_g)
+        return p
+    end
+    nerv.info("Param [%s] of layer [%s] is not found in layer_conf or layer_conf.paramRepo, switch to auto-generate.", pid, self.id)
+    p = p_type(pid_g, gconf)
+    p.trans = gconf.cumat_type(unpack(p_dim))
+    p.trans:generate(gconf.param_random)
+    return p
+end
+
 nerv.include('affine.lua')
 nerv.include('sigmoid.lua')
+nerv.include('tanh.lua')
 nerv.include('softmax_ce.lua')
 nerv.include('bias.lua')
 nerv.include('window.lua')
@@ -79,3 +100,5 @@ nerv.include('mse.lua')
 nerv.include('combiner.lua')
 nerv.include('affine_recurrent.lua')
 nerv.include('softmax.lua')
+nerv.include('elem_mul.lua')
+nerv.include('gate_fff.lua')
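The new Layer:find_param resolves a parameter in three stages: it first checks the layer's own configuration table (l_conf[pid]), then looks in the shared parameter repository (l_conf.pr) under the globally unique key <layer_id>_<pid>, and only if both fail does it auto-generate a fresh parameter, allocating its trans matrix via gconf.cumat_type and filling it with gconf.param_random. A minimal usage sketch follows; the constructor shape and the param types (nerv.LinearTransParam, nerv.BiasParam) are assumptions drawn from the rest of the NERV tree, not part of this commit:

    -- Hypothetical constructor of an affine layer (illustration only):
    -- each parameter goes through the three-stage lookup above, so a
    -- prebuilt paramRepo entry named "<id>_ltp" or "<id>_bp" takes
    -- precedence over random initialization.
    function nerv.AffineLayer:__init(id, global_conf, layer_conf)
        self.id = id
        self.gconf = global_conf
        self.dim_in = layer_conf.dim_in
        self.dim_out = layer_conf.dim_out
        -- linear transform: a dim_in[1] x dim_out[1] matrix
        self.ltp = self:find_param("ltp", layer_conf, global_conf,
                                   nerv.LinearTransParam,
                                   {self.dim_in[1], self.dim_out[1]})
        -- bias: a 1 x dim_out[1] row vector
        self.bp = self:find_param("bp", layer_conf, global_conf,
                                  nerv.BiasParam, {1, self.dim_out[1]})
    end

Because the auto-generate branch constructs the parameter under the global identifier pid_g, a randomly initialized parameter can later be written to and re-read from a paramRepo under the same name as an explicitly provided one.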