author     Qi Liu <[email protected]>   2016-03-02 15:38:55 +0800
committer  Qi Liu <[email protected]>   2016-03-02 15:38:55 +0800
commit     a87f8954c97cf633a0100c9108764bca8c43a083 (patch)
tree       88679743838a3c885551562dab0b1bab8cc0d10d /nerv
parent     31e575379fa46eb8f76f00ba62e11626ed67ca72 (diff)
add identity layer
Diffstat (limited to 'nerv')
-rw-r--r--   nerv/layer/duplicate.lua   | 12
-rw-r--r--   nerv/layer/identity.lua    | 33
-rw-r--r--   nerv/nn/network.lua        | 18
3 files changed, 53 insertions(+), 10 deletions(-)
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua
index 58758e8..fbd4a9e 100644
--- a/nerv/layer/duplicate.lua
+++ b/nerv/layer/duplicate.lua
@@ -16,13 +16,10 @@ function DuplicateLayer:__init(id, global_conf, layer_conf)
    end
end
-function DuplicateLayer:init(batch_size)
+function DuplicateLayer:init()
end
-function DuplicateLayer:batch_resize(batch_size)
-end
-
-function DuplicateLayer:update(bp_err, input, output)
+function DuplicateLayer:batch_resize()
end
function DuplicateLayer:propagate(input, output)
@@ -32,9 +29,12 @@ function DuplicateLayer:propagate(input, output)
    end
end
-function DuplicateLayer:back_propagate(bp_err, next_bp_err, input, output)
+function DuplicateLayer:back_propagate(bp_err, next_bp_err)
    next_bp_err:copy_from(bp_err[1])
    for i = 2, #self.dim_out do
        next_bp_err:add(next_bp_err, bp_err[i], 1.0, 1.0)
    end
end
+
+function DuplicateLayer:update()
+end
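The rewritten DuplicateLayer:back_propagate above sums the error signals coming back from every duplicated output into the single input-side error. A plain-Lua sketch of that accumulation, assuming the nerv matrix call c:add(a, b, alpha, beta) stores alpha*a + beta*b into c (plain numbers below stand in for the per-port error matrices; illustration only, not part of the commit):

-- Illustration only: scalar stand-ins for the per-port error matrices.
local bp_err = {1.5, -0.5, 2.0}      -- one error value per duplicated output
local next_bp_err = bp_err[1]        -- plays the role of copy_from(bp_err[1])
for i = 2, #bp_err do
    -- corresponds to next_bp_err:add(next_bp_err, bp_err[i], 1.0, 1.0)
    next_bp_err = 1.0 * next_bp_err + 1.0 * bp_err[i]
end
print(next_bp_err)                   -- 3.0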
diff --git a/nerv/layer/identity.lua b/nerv/layer/identity.lua
new file mode 100644
index 0000000..dc796fb
--- /dev/null
+++ b/nerv/layer/identity.lua
@@ -0,0 +1,33 @@
+local IdentityLayer = nerv.class('nerv.IdentityLayer', 'nerv.Layer')
+
+function IdentityLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(1, 1)
+    if self.dim_in[1] ~= self.dim_out[1] then
+        nerv.error('mismatching dimensions of input and output')
+    end
+end
+
+function IdentityLayer:init()
+end
+
+function IdentityLayer:batch_resize()
+end
+
+function IdentityLayer:propagate(input, output)
+    output[1]:copy_from(input[1])
+end
+
+function IdentityLayer:back_propagate(bp_err, next_bp_err)
+    next_bp_err[1]:copy_from(bp_err)
+end
+
+function IdentityLayer:update()
+end
+
+function IdentityLayer:get_params()
+    return nerv.ParamRepo({})
+end
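For reference, a hypothetical usage sketch of the new pass-through layer (not part of the commit): it assumes nerv is already loaded, passes an empty table as global_conf, and uses a tiny mock "matrix" that only provides the copy_from method the layer relies on.

-- Hypothetical example, not from the repository; assumes nerv is loaded.
local function mock_mat(values)
    return {
        v = values,
        copy_from = function(self, other)
            for i = 1, #other.v do self.v[i] = other.v[i] end
        end,
    }
end

-- Constructor signature as shown in the diff: (id, global_conf, layer_conf).
local layer = nerv.IdentityLayer('identity1', {}, {dim_in = {3}, dim_out = {3}})
layer:init()

local input  = {mock_mat({0.5, -1.0, 2.0})}
local output = {mock_mat({0, 0, 0})}
layer:propagate(input, output)   -- output[1] now holds a copy of input[1]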
diff --git a/nerv/nn/network.lua b/nerv/nn/network.lua
index e1a9629..3cf052b 100644
--- a/nerv/nn/network.lua
+++ b/nerv/nn/network.lua
@@ -118,7 +118,7 @@ function network:init(batch_size, chunk_size)
end
function network:topsort()
-    nerv.info('Network topology sort')
+    nerv.info('network topology sort')
    local degree = {}
    for t = 1, self.chunk_size do
        degree[t] = {}
@@ -133,7 +133,7 @@ function network:topsort()
            for j = 1, #dim_out do
                if self.output_conn[i][j] ~= nil then
                    local edge = self.output_conn[i][j]
-                    local id, _, time = edge[1], edge[2], edge[3] + t
+                    local id, time = edge[1], edge[3] + t
                    if time >= 1 and time <= self.chunk_size and id ~= 0 then
                        degree[time][id] = degree[time][id] + 1
                    end
@@ -160,7 +160,7 @@ function network:topsort()
        for j = 1, #dim_out do
            if self.output_conn[i][j] ~= nil then
                local edge = self.output_conn[i][j]
-                local id, _, time = edge[1], edge[2], edge[3] + t
+                local id, time = edge[1], edge[3] + t
                if time >= 1 and time <= self.chunk_size and id ~= 0 then
                    degree[time][id] = degree[time][id] - 1
                    if degree[time][id] == 0 then
@@ -178,7 +178,7 @@ function network:topsort()
end
function network:make_initial_store()
-    nerv.info('Network initing storage')
+    nerv.info('network initing storage')
    -- allocate memory
    local memory = {}
@@ -386,6 +386,7 @@ function network:mini_batch_init(information)
            table.insert(self.border[chunk], i)
        end
    end
+    -- copy legacy
    for t = 1 - self.delay, 0 do
        for i = 1, #self.layers do
            local _, dim_out = self.layers[i]:get_dim()
@@ -398,6 +399,7 @@ function network:mini_batch_init(information)
            end
        end
    end
+    -- flush border gradient
    for t = self.max_length + 1, self.max_length + self.delay do
        if t > self.chunk_size then
            break
@@ -419,6 +421,7 @@ function network:propagate(input, output)
        if t <= self.max_length then
            self.layers[id]:propagate(self.input[t][id], self.output[t][id], t)
        end
+        -- flush border activation
        for j = 1, #self.border[t] do
            local batch = self.border[t][j]
            local _, dim_out = self.layers[id]:get_dim()
@@ -437,6 +440,7 @@ function network:back_propagate(bp_err, next_bp_err, input, output)
    for i = #self.queue, 1, -1 do
        local t, id = self.queue[i].chunk, self.queue[i].id
        if t <= self.max_length then
+            -- flush border gradient
            for j = 1, #self.border[t] do
                local batch = self.border[t][j]
                local dim_in, _ = self.layers[id]:get_dim()
@@ -445,6 +449,12 @@ function network:back_propagate(bp_err, next_bp_err, input, output)
                end
            end
            self.layers[id]:back_propagate(self.err_input[t][id], self.err_output[t][id], self.input[t][id], self.output[t][id], t)
+            if self.clip ~= nil then
+                local dim_in, _ = self.layers[id]:get_dim()
+                for j = 1, #dim_in do
+                    self.err_output[t][id][j]:clip(-self.clip, self.clip)
+                end
+            end
        end
    end
end
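The new block in network:back_propagate applies gradient clipping whenever self.clip is set, presumably clamping every entry of the per-layer error matrices to [-clip, clip] through the :clip(min, max) matrix call shown above. A plain-Lua analogue of that element-wise clamp (illustration only; a plain table stands in for a matrix):

-- Illustration only: clamp every entry to [-threshold, threshold],
-- mirroring err_output[t][id][j]:clip(-self.clip, self.clip) above.
local function clip(values, min, max)
    for i = 1, #values do
        values[i] = math.max(min, math.min(max, values[i]))
    end
end

local grad = {0.3, -7.2, 12.5, -0.1}
local threshold = 5.0                 -- plays the role of self.clip
clip(grad, -threshold, threshold)
print(table.concat(grad, ', '))       -- 0.3, -5, 5, -0.1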