author     txh18 <cloudygooseg@gmail.com>    2015-11-25 23:42:37 +0800
committer  txh18 <cloudygooseg@gmail.com>    2015-11-25 23:42:37 +0800
commit     75a2d6a2a08caf987017f5a9043ac93afcd70980 (patch)
tree       d3b08fa846de56f3cd8a3f9a7bd3add200840ea8 /nerv/layer
parent     ca3500f01ea7ce695a4dbf70d2be8244827097c9 (diff)
changed auto-generating params: they are no longer saved into global_conf.paramRepo
Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/affine.lua |  1
-rw-r--r--  nerv/layer/init.lua   | 12
-rw-r--r--  nerv/layer/tanh.lua   | 35
3 files changed, 42 insertions(+), 6 deletions(-)
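The substance of the init.lua change is that find_param() no longer registers an auto-generated parameter in the global repo. A minimal sketch of the behavioural difference, assuming a layer object and a nerv.BiasParam of illustrative dimensions (only find_param() itself comes from this patch):

local p = layer:find_param("bp", layer_conf, gconf, nerv.BiasParam,
                           {1, layer.dim_out[1]})
-- before this commit: find_param() also called gconf.paramRepo:add(...), so the
--                     generated param became globally visible as "<layer_id>_bp"
-- after this commit:  only the returned value `p` holds it; the layer itself is
--                     responsible for exposing it through get_params()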
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index d56fcb8..566e9bc 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -127,4 +127,5 @@ function AffineLayer:get_params()
     for i = 2, #self.dim_in do
         pr:add(self["ltp" .. i].id, self["ltp" .. i])
     end
+    return pr
 end
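The one-line affine.lua fix matters because get_params() previously fell off the end of the function and implicitly returned nil after building the repo. A hedged example of a caller that would have failed before the fix (the variable names are placeholders, not from the patch):

local pr = affine_layer:get_params()   -- was nil before this commit
-- any use of the repo, e.g. pr:has_param(some_id), would then raise an error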
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index c6d0a98..b8b7ea1 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -72,27 +72,27 @@ end
 function Layer:find_param(pid, l_conf, gconf, p_type, p_dim)
     if l_conf[pid] ~= nil then
-        nerv.printf("Param [%s] of layer [%s] found in layer_conf.\n", pid, self.id)
+        nerv.info("Param [%s] of layer [%s] found in layer_conf.\n", pid, self.id)
         return l_conf[pid]
     end
     local pid_g = self.id .. '_' .. pid --global identifier
-    local pr = gconf.paramRepo
+    local pr = l_conf.pr
     local p
-    if pr:has_param(pid_g) == true then
-        nerv.printf("Param [%s] of layer [%s] found in paramRepo.\n", pid, self.id)
+    if pr ~= nil and pr:has_param(pid_g) == true then
+        nerv.info("Param [%s] of layer [%s] found in layer_conf.paramRepo.\n", pid, self.id)
         p = pr:get_param(pid_g)
         return p
     end
-    nerv.printf("Param [%s] of layer [%s] is not found in layer_conf or paramRepo, switch to auto-generate.\n", pid, self.id)
+    nerv.info("Param [%s] of layer [%s] is not found in layer_conf or layer_conf.paramRepo, switch to auto-generate.\n", pid, self.id)
     p = p_type(pid_g, gconf)
     p.trans = gconf.cumat_type(unpack(p_dim))
     p.trans:generate(gconf.param_random)
-    pr:add(pid_g, p) --add the parameter into the paramRepo
     return p
 end
 nerv.include('affine.lua')
 nerv.include('sigmoid.lua')
+nerv.include('tanh.lua')
 nerv.include('softmax_ce.lua')
 nerv.include('bias.lua')
 nerv.include('window.lua')
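After this patch the lookup order in find_param() is: an explicit param in layer_conf[pid], then layer_conf.pr (a repo passed in through the layer config), then auto-generation from gconf. A sketch of how a layer might wire this up; the layer class, param id and dimensions are illustrative assumptions, only the find_param() signature is taken from the diff above:

function MyAffineLikeLayer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in, self.dim_out = layer_conf.dim_in, layer_conf.dim_out
    -- checks layer_conf.ltp, then layer_conf.pr, then auto-generates a
    -- nerv.LinearTransParam of the given dimensions (no global registration)
    self.ltp = self:find_param("ltp", layer_conf, global_conf,
                               nerv.LinearTransParam,
                               {self.dim_in[1], self.dim_out[1]})
end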
diff --git a/nerv/layer/tanh.lua b/nerv/layer/tanh.lua
new file mode 100644
index 0000000..e1c32f2
--- /dev/null
+++ b/nerv/layer/tanh.lua
@@ -0,0 +1,35 @@
+local TanhLayer = nerv.class("nerv.TanhLayer", "nerv.Layer")
+
+function TanhLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.gconf = global_conf
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self:check_dim_len(1, 1)
+end
+
+function TanhLayer:init()
+    if self.dim_in[1] ~= self.dim_out[1] then
+        nerv.error("mismatching dimensions of input and output")
+    end
+end
+
+function TanhLayer:batch_resize(batch_size)
+    -- do nothing
+end
+
+function TanhLayer:update(bp_err, input, output)
+    -- no params, therefore do nothing
+end
+
+function TanhLayer:propagate(input, output)
+    output[1]:tanh(input[1])
+end
+
+function TanhLayer:back_propagate(bp_err, next_bp_err, input, output)
+    next_bp_err[1]:tanh_grad(bp_err[1], output[1])
+end
+
+function TanhLayer:get_params()
+    return nerv.ParamRepo({})
+end
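A usage sketch for the new layer, assuming a CUDA matrix type such as nerv.CuMatrixFloat, a 128 x 429 mini-batch, and that the tanh/tanh_grad matrix kernels referenced by the layer are available in the matrix backend; the gconf table, the dimensions and the matrix construction are illustrative, while the propagate/back_propagate signatures come from the code above:

local gconf = {cumat_type = nerv.CuMatrixFloat}
local tanh  = nerv.TanhLayer("tanh1", gconf, {dim_in = {429}, dim_out = {429}})
tanh:init()

local input  = {gconf.cumat_type(128, 429)}   -- batch_size x dim_in
local output = {gconf.cumat_type(128, 429)}
tanh:propagate(input, output)                 -- output[1] = tanh(input[1])

local bp_err      = {gconf.cumat_type(128, 429)}
local next_bp_err = {gconf.cumat_type(128, 429)}
tanh:back_propagate(bp_err, next_bp_err, input, output)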