Diffstat (limited to 'nerv/layer')
 nerv/layer/duplicate.lua |  41 ++++++
 nerv/layer/graph.lua     | 156 +++++++++++++++
 nerv/layer/gru.lua       |   4 +-
 nerv/layer/identity.lua  |  30 +++++
 nerv/layer/init.lua      |  12 ++
 nerv/layer/lstm.lua      |  52 ++++----
 nerv/layer/rnn.lua       |  38 ++++++
 7 files changed, 305 insertions(+), 28 deletions(-)
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua
new file mode 100644
index 0000000..137472b
--- /dev/null
+++ b/nerv/layer/duplicate.lua
@@ -0,0 +1,41 @@
+local DuplicateLayer = nerv.class('nerv.DuplicateLayer', 'nerv.Layer')
+
+function DuplicateLayer:__init(id, global_conf, layer_conf)
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
+    self:check_dim_len(1, -1)
+    if #self.dim_out < 1 then
+        nerv.error('no output specified')
+    end
+    for i = 1, #self.dim_out do
+        if self.dim_out[i] ~= self.dim_in[1] then
+            nerv.error('mismatching dimensions of outputs')
+        end
+    end
+end
+
+function DuplicateLayer:init()
+end
+
+function DuplicateLayer:batch_resize()
+end
+
+function DuplicateLayer:propagate(input, output)
+    for i = 1, #self.dim_out do
+        output[i]:copy_from(input[1])
+        -- FIXME: use reference copy to speed up
+    end
+end
+
+function DuplicateLayer:back_propagate(bp_err, next_bp_err)
+    next_bp_err[1]:copy_from(bp_err[1])
+    for i = 2, #self.dim_out do
+        next_bp_err[1]:add(next_bp_err[1], bp_err[i], 1.0, 1.0)
+    end
+end
+
+function DuplicateLayer:update()
+end
+
+function DuplicateLayer:get_params()
+    return nerv.ParamRepo({}, self.loc_type)
+end
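
The new DuplicateLayer is a pure fan-out: propagate() copies its single input into every output, and back_propagate() sums the incoming gradients back into one stream. A minimal construction sketch, assuming a running NERV setup with a global_conf table gconf (the id and sizes below are illustrative only):

    -- one 256-dim input fanned out to three identical outputs;
    -- every dim_out entry must equal dim_in[1]
    local dup = nerv.DuplicateLayer('dup', gconf,
                                    {dim_in = {256},
                                     dim_out = {256, 256, 256}})
    dup:init()
    -- forward:  output[i] := input[1] for every i
    -- backward: next_bp_err[1] := bp_err[1] + bp_err[2] + bp_err[3]
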
diff --git a/nerv/layer/graph.lua b/nerv/layer/graph.lua
new file mode 100644
index 0000000..5f42fca
--- /dev/null
+++ b/nerv/layer/graph.lua
@@ -0,0 +1,156 @@
+local GraphLayer = nerv.class('nerv.GraphLayer', 'nerv.Layer')
+
+function GraphLayer:__init(id, global_conf, layer_conf)
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
+    self:graph_init(layer_conf.layer_repo, layer_conf.connections)
+end
+
+local function parse_id(str)
+    local id, port, _
+    _, _, id, port = string.find(str, "([a-zA-Z0-9_.]+)%[([0-9]+)%]")
+    if id == nil or port == nil then
+        _, _, id, port = string.find(str, "(.+)%[([0-9]+)%]")
+        if not (id == "<input>" or id == "<output>") then
+            nerv.error("wrong format of connection id")
+        end
+    end
+    port = tonumber(port)
+    return id, port
+end
+
+function GraphLayer:add_prefix(layers, connections)
+    local function ap(name)
+        return self.id .. '.' .. name
+    end
+
+    for layer_type, sublayers in pairs(layers) do
+        local tmp = {}
+        for name, layer_config in pairs(sublayers) do
+            tmp[ap(name)] = layer_config
+        end
+        layers[layer_type] = tmp
+    end
+
+    for i = 1, #connections do
+        local from, to = connections[i][1], connections[i][2]
+        if parse_id(from) ~= '<input>' then
+            connections[i][1] = ap(from)
+        end
+        if parse_id(to) ~= '<output>' then
+            connections[i][2] = ap(to)
+        end
+    end
+end
+
+function GraphLayer:discover(id, layer_repo)
+    if id == '<output>' then
+        id = '<input>'
+    end
+    local layers = self.layers
+    local ref = layers[id]
+    if ref == nil then
+        local layer = layer_repo:get_layer(id)
+        local dim_in, dim_out = layer:get_dim()
+        self.layer_num = self.layer_num + 1
+        ref = {
+            layer = layer,
+            inputs = {},
+            outputs = {},
+            dim_in = dim_in,
+            dim_out = dim_out,
+            id = self.layer_num,
+        }
+        layers[id] = ref
+    end
+    return ref
+end
+
+function GraphLayer:graph_init(layer_repo, connections)
+    local layers = {}
+    layers['<input>'] = {
+        inputs = {},
+        outputs = {},
+        dim_in = self.dim_out,
+        dim_out = self.dim_in,
+        id = 0,
+    }
+    self.layers = layers
+    self.layer_num = 0
+    self.connections = {}
+
+    -- check data dimension between connected ports
+    for _, edge in pairs(connections) do
+        local from, to, time = edge[1], edge[2], edge[3]
+        local id_from, port_from = parse_id(from)
+        local id_to, port_to = parse_id(to)
+        local ref_from = self:discover(id_from, layer_repo)
+        local ref_to = self:discover(id_to, layer_repo)
+        if ref_from.outputs[port_from] ~= nil then
+            nerv.error('%s has already been attached', from)
+        end
+        if ref_to.inputs[port_to] ~= nil then
+            nerv.error('%s has already been attached', to)
+        end
+        if ref_from.dim_out[port_from] ~= ref_to.dim_in[port_to] then
+            nerv.error('mismatching data dimension between %s and %s', from, to)
+        end
+        if ref_from.id == 0 and ref_to.id == 0 then
+            nerv.error('short-circuit connection between <input> and <output>')
+        end
+        ref_from.outputs[port_from] = true
+        ref_to.inputs[port_to] = true
+        table.insert(self.connections, {ref_from.id, port_from, ref_to.id, port_to, time})
+    end
+
+    -- check dangling ports
+    for id, ref in pairs(layers) do
+        if id ~= '<input>' then
+            for i = 1, #ref.dim_in do
+                if ref.inputs[i] == nil then
+                    nerv.error('dangling input port %d of layer %s', i, id)
+                end
+            end
+            for i = 1, #ref.dim_out do
+                if ref.outputs[i] == nil then
+                    nerv.error('dangling output port %d of layer %s', i, id)
+                end
+            end
+        end
+    end
+    for i = 1, #self.dim_in do
+        if layers['<input>'].outputs[i] == nil then
+            nerv.error('dangling port %d of layer <input>', i)
+        end
+    end
+    for i = 1, #self.dim_out do
+        if layers['<input>'].inputs[i] == nil then
+            nerv.error('dangling port %d of layer <output>', i)
+        end
+    end
+end
+
+function GraphLayer:set_attr(name, value)
+    self[name] = value
+    for id, ref in pairs(self.layers) do
+        if id ~= '<input>' then
+            ref.layer:set_attr(name, value)
+        end
+    end
+end
+
+function GraphLayer:get_sublayer(id)
+    if self.layers[id] == nil or id == '<input>' then
+        nerv.error('layer with id %s not found', id)
+    end
+    return self.layers[id].layer
+end
+
+function GraphLayer:get_params()
+    local param_repos = {}
+    for id, ref in pairs(self.layers) do
+        if id ~= '<input>' then
+            table.insert(param_repos, ref.layer:get_params())
+        end
+    end
+    return nerv.ParamRepo.merge(param_repos, self.loc_type)
+end
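
A GraphLayer is described by a list of {from, to, time} edges whose endpoints are 'id[port]' strings; the pseudo-layers <input> and <output> stand for the graph's own ports, which is why the '<input>' record above deliberately swaps dim_in and dim_out, and a non-zero time field marks a delayed (recurrent) edge. A feed-forward sketch, assuming gconf and a layer_repo that already holds layers named 'affine0' and 'sigmoid0' (all names and sizes are illustrative):

    local connections = {
        -- {from, to, time}
        {'<input>[1]',  'affine0[1]',  0},
        {'affine0[1]',  'sigmoid0[1]', 0},
        {'sigmoid0[1]', '<output>[1]', 0},
    }
    local net = nerv.GraphLayer('net', gconf,
                                {dim_in = {429}, dim_out = {2048},
                                 layer_repo = layer_repo,
                                 connections = connections})

graph_init() rejects doubly-attached ports, dimension mismatches, direct <input>-to-<output> edges, and dangling ports, so a bad topology fails at construction time rather than during propagation.
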
diff --git a/nerv/layer/gru.lua b/nerv/layer/gru.lua
index a590a67..71718d7 100644
--- a/nerv/layer/gru.lua
+++ b/nerv/layer/gru.lua
@@ -13,7 +13,7 @@ function GRULayer:__init(id, global_conf, layer_conf)
     -- prepare a DAGLayer to hold the gru structure
     local pr = layer_conf.pr
     if pr == nil then
-        pr = nerv.ParamRepo(nil, self.loc_type)
+        pr = nerv.ParamRepo({}, self.loc_type)
     end
     local function ap(str)
@@ -102,7 +102,7 @@ end
 function GRULayer:bind_params()
     local pr = self.lconf.pr -- layer_conf is not in scope here; assumes nerv.Layer.__init stored it as self.lconf
     if pr == nil then
-        pr = nerv.ParamRepo(nil, self.loc_type)
+        pr = nerv.ParamRepo({}, self.loc_type)
     end
     self.lrepo:rebind(pr)
 end
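
The gru.lua change is purely the ParamRepo calling convention: an explicit empty table now stands in for nil as the "no parameters to load" argument, presumably so the constructor can treat its first argument uniformly as a list. The same substitution is made in lstm.lua below. Sketch of the new call:

    local pr = nerv.ParamRepo({}, self.loc_type)  -- empty repo, nothing loaded
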
diff --git a/nerv/layer/identity.lua b/nerv/layer/identity.lua
new file mode 100644
index 0000000..d56337d
--- /dev/null
+++ b/nerv/layer/identity.lua
@@ -0,0 +1,30 @@
+local IdentityLayer = nerv.class('nerv.IdentityLayer', 'nerv.Layer')
+
+function IdentityLayer:__init(id, global_conf, layer_conf)
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
+    self:check_dim_len(1, 1)
+    if self.dim_in[1] ~= self.dim_out[1] then
+        nerv.error('mismatching dimensions of input and output')
+    end
+end
+
+function IdentityLayer:init()
+end
+
+function IdentityLayer:batch_resize()
+end
+
+function IdentityLayer:propagate(input, output)
+    output[1]:copy_from(input[1])
+end
+
+function IdentityLayer:back_propagate(bp_err, next_bp_err)
+    next_bp_err[1]:copy_from(bp_err[1])
+end
+
+function IdentityLayer:update()
+end
+
+function IdentityLayer:get_params()
+    return nerv.ParamRepo({}, self.loc_type)
+end
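
IdentityLayer forwards its single input unchanged in both directions, which makes it handy as an explicit wiring point inside a GraphLayer. A construction sketch (illustrative id and size, assuming gconf as before; __init raises an error unless dim_in[1] equals dim_out[1]):

    local pass = nerv.IdentityLayer('pass', gconf,
                                    {dim_in = {512}, dim_out = {512}})
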
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 146ad8c..475ef62 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -85,6 +85,14 @@ function Layer:get_dim()
     return self.dim_in, self.dim_out
 end
+function Layer:set_attr(name, value)
+    self[name] = value
+end
+
+function Layer:get_sublayer(id)
+    nerv.error('primitive layer does not have sublayers')
+end
+
function Layer:find_param(plist, lconf, gconf, p_type, p_dim)
     if type(plist) == "string" then
         plist = {plist}
@@ -119,6 +127,7 @@ function Layer:find_param(plist, lconf, gconf, p_type, p_dim)
     return p
end
+nerv.include('graph.lua')
nerv.include('affine.lua')
nerv.include('sigmoid.lua')
nerv.include('tanh.lua')
@@ -133,6 +142,9 @@ nerv.include('lstm.lua')
nerv.include('lstm_gate.lua')
nerv.include('dropout.lua')
nerv.include('gru.lua')
+nerv.include('rnn.lua')
+nerv.include('duplicate.lua')
+nerv.include('identity.lua')
 -- The following lines are for backward compatibility, and will be removed in
 -- the future. The use of these names is deprecated.
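
The two new base-class hooks are the defaults that GraphLayer overrides: set_attr() on a primitive layer just sets a field, while GraphLayer's version also recurses into every sublayer; get_sublayer() errors on primitive layers but performs an id lookup on graphs. A usage sketch, reusing the 'net' GraphLayer from the graph.lua notes above ('gconf' is an illustrative attribute name):

    net:set_attr('gconf', gconf)                -- reaches every sublayer
    local affine = net:get_sublayer('affine0')  -- a primitive layer would error here
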
diff --git a/nerv/layer/lstm.lua b/nerv/layer/lstm.lua
index d4c9212..641d5dc 100644
--- a/nerv/layer/lstm.lua
+++ b/nerv/layer/lstm.lua
@@ -8,7 +8,7 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
     -- prepare a DAGLayer to hold the lstm structure
     local pr = layer_conf.pr
     if pr == nil then
-        pr = nerv.ParamRepo(nil, self.loc_type)
+        pr = nerv.ParamRepo({}, self.loc_type)
     end
     local function ap(str)
@@ -18,47 +18,47 @@ function LSTMLayer:__init(id, global_conf, layer_conf)
     local dout1, dout2, dout3 = self.dim_out[1], self.dim_out[2], self.dim_out[3]
     local layers = {
         ["nerv.CombinerLayer"] = {
- [ap("inputXDup")] = {{}, {dim_in = {din1},
+ [ap("inputXDup")] = {dim_in = {din1},
dim_out = {din1, din1, din1, din1},
- lambda = {1}}},
+ lambda = {1}},
- [ap("inputHDup")] = {{}, {dim_in = {din2},
+ [ap("inputHDup")] = {dim_in = {din2},
dim_out = {din2, din2, din2, din2},
- lambda = {1}}},
+ lambda = {1}},
- [ap("inputCDup")] = {{}, {dim_in = {din3},
+ [ap("inputCDup")] = {dim_in = {din3},
dim_out = {din3, din3, din3},
- lambda = {1}}},
+ lambda = {1}},
- [ap("mainCDup")] = {{}, {dim_in = {din3, din3},
+ [ap("mainCDup")] = {dim_in = {din3, din3},
dim_out = {din3, din3, din3},
- lambda = {1, 1}}},
+ lambda = {1, 1}},
         },
         ["nerv.AffineLayer"] = {
- [ap("mainAffineL")] = {{}, {dim_in = {din1, din2},
+ [ap("mainAffineL")] = {dim_in = {din1, din2},
dim_out = {dout1},
- pr = pr}},
+ pr = pr},
         },
         ["nerv.TanhLayer"] = {
- [ap("mainTanhL")] = {{}, {dim_in = {dout1}, dim_out = {dout1}}},
- [ap("outputTanhL")] = {{}, {dim_in = {dout1}, dim_out = {dout1}}},
+ [ap("mainTanhL")] = {dim_in = {dout1}, dim_out = {dout1}},
+ [ap("outputTanhL")] = {dim_in = {dout1}, dim_out = {dout1}},
         },
         ["nerv.LSTMGateLayer"] = {
- [ap("forgetGateL")] = {{}, {dim_in = {din1, din2, din3},
- dim_out = {din3}, pr = pr}},
- [ap("inputGateL")] = {{}, {dim_in = {din1, din2, din3},
- dim_out = {din3}, pr = pr}},
- [ap("outputGateL")] = {{}, {dim_in = {din1, din2, din3},
- dim_out = {din3}, pr = pr}},
+ [ap("forgetGateL")] = {dim_in = {din1, din2, din3},
+ dim_out = {din3}, pr = pr},
+ [ap("inputGateL")] = {dim_in = {din1, din2, din3},
+ dim_out = {din3}, pr = pr},
+ [ap("outputGateL")] = {dim_in = {din1, din2, din3},
+ dim_out = {din3}, pr = pr},
         },
         ["nerv.ElemMulLayer"] = {
- [ap("inputGMulL")] = {{}, {dim_in = {din3, din3},
- dim_out = {din3}}},
- [ap("forgetGMulL")] = {{}, {dim_in = {din3, din3},
- dim_out = {din3}}},
- [ap("outputGMulL")] = {{}, {dim_in = {din3, din3},
- dim_out = {din3}}},
+ [ap("inputGMulL")] = {dim_in = {din3, din3},
+ dim_out = {din3}},
+ [ap("forgetGMulL")] = {dim_in = {din3, din3},
+ dim_out = {din3}},
+ [ap("outputGMulL")] = {dim_in = {din3, din3},
+ dim_out = {din3}},
         },
     }
@@ -114,7 +114,7 @@ end
 function LSTMLayer:bind_params()
     local pr = self.lconf.pr -- layer_conf is not in scope here; assumes nerv.Layer.__init stored it as self.lconf
     if pr == nil then
-        pr = nerv.ParamRepo(nil, self.loc_type)
+        pr = nerv.ParamRepo({}, self.loc_type)
     end
     self.lrepo:rebind(pr)
 end
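
The bulk of the lstm.lua hunk is mechanical: each sublayer spec in the LayerRepo table drops the old {{}, {config}} wrapper and becomes a single config table. Under the new format a repo spec looks like this (names and sizes illustrative):

    local layers = {
        ["nerv.AffineLayer"] = {
            affine0 = {dim_in = {429, 2048}, dim_out = {2048}, pr = pr},
        },
    }
    local repo = nerv.LayerRepo(layers, pr, gconf)
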
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
new file mode 100644
index 0000000..e59cf5b
--- /dev/null
+++ b/nerv/layer/rnn.lua
@@ -0,0 +1,38 @@
+local RNNLayer = nerv.class('nerv.RNNLayer', 'nerv.GraphLayer')
+
+function RNNLayer:__init(id, global_conf, layer_conf)
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
+    self:check_dim_len(1, 1)
+
+    local din = layer_conf.dim_in[1]
+    local dout = layer_conf.dim_out[1]
+
+    local pr = layer_conf.pr
+    if pr == nil then
+        pr = nerv.ParamRepo({}, self.loc_type)
+    end
+
+    local layers = {
+        ['nerv.AffineLayer'] = {
+            main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
+        },
+        ['nerv.SigmoidLayer'] = {
+            sigmoid = {dim_in = {dout}, dim_out = {dout}},
+        },
+        ['nerv.DuplicateLayer'] = {
+            dup = {dim_in = {dout}, dim_out = {dout, dout}},
+        }
+    }
+
+    local connections = {
+        {'<input>[1]', 'main[1]', 0},
+        {'main[1]', 'sigmoid[1]', 0},
+        {'sigmoid[1]', 'dup[1]', 0},
+        {'dup[1]', 'main[2]', 1},
+        {'dup[2]', '<output>[1]', 0},
+    }
+
+    self:add_prefix(layers, connections)
+    local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
+    self:graph_init(layer_repo, connections)
+end
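
rnn.lua is the first client of the new machinery: a complete recurrent layer in five edges over three sublayers. The edge {'dup[1]', 'main[2]', 1} is the recurrence, feeding one copy of the hidden state back into the affine layer's second input with a one-frame delay, while dup's other copy goes to <output>. Construction sketch (sizes illustrative; pr may be omitted, in which case an empty ParamRepo is created):

    -- at frame t, main sees {x_t, h_(t-1)} and emits h_t = sigmoid(W*x + R*h + b)
    local rnn = nerv.RNNLayer('rnn', gconf,
                              {dim_in = {429}, dim_out = {2048}, pr = pr})
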