author     txh18 <cloudygooseg@gmail.com>  2015-11-20 19:58:14 +0800
committer  txh18 <cloudygooseg@gmail.com>  2015-11-20 19:58:14 +0800
commit     4f5b45b79b8d5f6a9094888cf6b929fe86ac24a3 (patch)
tree       60d5b6232b8d33da9178717c27ecb6dc3591d0b6 /nerv/layer
parent     6456f5d0b97c5ad7e35c58751f74b8c9fefb635e (diff)
working on automatic parameter for layers
Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/affine.lua    4
-rw-r--r--  nerv/layer/init.lua     21
2 files changed, 23 insertions(+), 2 deletions(-)
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index 6a541e8..3e84ec0 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -63,10 +63,10 @@ end
 function AffineLayer:__init(id, global_conf, layer_conf)
     self.id = id
-    self.ltp = layer_conf.ltp
-    self.bp = layer_conf.bp
     self.dim_in = layer_conf.dim_in
     self.dim_out = layer_conf.dim_out
+    self.ltp = self:find_param("ltp", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[1], self.dim_out[1]}) -- layer_conf.ltp
+    self.bp = self:find_param("bp", layer_conf, global_conf, nerv.BiasParam, {1, self.dim_out[1]}) -- layer_conf.bp
     self.gconf = global_conf
     self:check_dim_len(1, 1) -- exactly one input and one output
     self.direct_update = layer_conf.direct_update or global_conf.direct_update
 end
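
With this change, an AffineLayer no longer needs its parameters passed in explicitly through layer_conf. A minimal usage sketch of the new construction path (hypothetical layer id and dimensions; assumes a NERV gconf carrying paramRepo, cumat_type, and param_random, as the diff expects):

-- Hypothetical usage sketch, not part of this commit; "affine0" and the
-- dimensions are made-up example values.
local layer = nerv.AffineLayer("affine0", gconf,
                               {dim_in = {429}, dim_out = {2048}})
-- With layer_conf.ltp and layer_conf.bp omitted, find_param falls back to
-- gconf.paramRepo:get_param("affine0_ltp") / get_param("affine0_bp"), and
-- finally auto-generates params of shape {429, 2048} and {1, 2048}.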
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 6861b0e..d268caa 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -70,6 +70,27 @@ function Layer:get_dim()
     return self.dim_in, self.dim_out
 end
 
+function Layer:find_param(pid, l_conf, gconf, p_type, p_dim)
+    if l_conf[pid] ~= nil then
+        nerv.printf("Param [%s] of layer [%s] found in layer_conf.\n", pid, self.id)
+        return l_conf[pid]
+    end
+    local pid_g = self.id .. '_' .. pid -- global identifier
+    local pr = gconf.paramRepo
+    local p
+    p = pr:get_param(pid_g)
+    if p ~= nil then
+        nerv.printf("Param [%s] of layer [%s] found in paramRepo.\n", pid, self.id)
+        return p
+    end
+    nerv.printf("Param [%s] of layer [%s] not found in layer_conf or paramRepo, switching to auto-generation.\n", pid, self.id)
+    p = p_type(pid_g, gconf)
+    p.trans = gconf.cumat_type(unpack(p_dim))
+    p.trans:generate(gconf.param_random)
+    pr:add(pid_g, p) -- add the parameter to the paramRepo
+    return p
+end
+
 nerv.include('affine.lua')
 nerv.include('sigmoid.lua')
 nerv.include('softmax_ce.lua')
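
The lookup order in find_param also makes parameter sharing possible: a parameter pre-registered in the ParamRepo under the global identifier <layer_id>_<pid> is picked up before auto-generation. A hedged sketch using only the calls visible in the diff (hypothetical id and dimensions):

-- Sketch: pre-register a transform under "affine0_ltp" so that
-- find_param("ltp", ...) for layer "affine0" finds it in the repo
-- instead of auto-generating a fresh one.
local ltp = nerv.LinearTransParam("affine0_ltp", gconf)
ltp.trans = gconf.cumat_type(429, 2048) -- {dim_in[1], dim_out[1]}
ltp.trans:generate(gconf.param_random)
gconf.paramRepo:add("affine0_ltp", ltp)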