-rw-r--r--  nerv/Makefile           |   1
-rw-r--r--  nerv/layer/graph.lua    | 118
-rw-r--r--  nerv/layer/init.lua     |  10
-rw-r--r--  nerv/layer/rnn.lua      |  37
-rw-r--r--  nerv/main.lua           |  31
-rw-r--r--  nerv/nn/layer_repo.lua  |  14
6 files changed, 203 insertions, 8 deletions
diff --git a/nerv/Makefile b/nerv/Makefile
index a2155b9..ba97579 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -34,6 +34,7 @@ LUA_LIBS := matrix/init.lua io/init.lua init.lua \
 	layer/init.lua layer/affine.lua layer/sigmoid.lua layer/tanh.lua layer/softmax_ce.lua layer/softmax.lua \
 	layer/window.lua layer/bias.lua layer/combiner.lua layer/mse.lua \
 	layer/elem_mul.lua layer/lstm.lua layer/lstm_gate.lua layer/dropout.lua layer/gru.lua \
+	layer/graph.lua layer/rnn.lua \
 	nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/layer_dag.lua \
 	io/sgd_buffer.lua \
 	tnn/init.lua tnn/sutil.lua tnn/tnn.lua
diff --git a/nerv/layer/graph.lua b/nerv/layer/graph.lua
new file mode 100644
index 0000000..83cf810
--- /dev/null
+++ b/nerv/layer/graph.lua
@@ -0,0 +1,118 @@
+local GraphLayer = nerv.class('nerv.GraphLayer', 'nerv.Layer')
+
+function GraphLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:graph_init(layer_conf.layer_repo, layer_conf.connections)
+end
+
+local function parse_id(str)
+    local id, port, _
+    _, _, id, port = string.find(str, "([a-zA-Z0-9_.]+)%[([0-9]+)%]")
+    if id == nil or port == nil then
+        _, _, id, port = string.find(str, "(.+)%[([0-9]+)%]")
+        if not (id == "<input>" or id == "<output>") then
+            nerv.error("wrong format of connection id")
+        end
+    end
+    port = tonumber(port)
+    return id, port
+end
+
+local function discover(id, layers, layer_repo)
+    if id == '<output>' then
+        id = '<input>'
+    end
+    local ref = layers[id]
+    if ref == nil then
+        local layer = layer_repo:get_layer(id)
+        local dim_in, dim_out = layer:get_dim()
+        ref = {
+            layer = layer,
+            inputs = {},
+            outputs = {},
+            dim_in = dim_in,
+            dim_out = dim_out,
+        }
+        layers[id] = ref
+    end
+    return ref
+end
+
+function GraphLayer:graph_init(layer_repo, connections)
+    self.connections = connections
+    self.sublayer = nerv.LayerRepo({}, nerv.ParamRepo(), self.gconf)
+
+    -- check data dimension between connected ports
+    local layers = {}
+    layers['<input>'] = {
+        inputs = {},
+        outputs = {},
+        dim_in = self.dim_out,
+        dim_out = self.dim_in,
+    }
+    for _, edge in pairs(self.connections) do
+        local from = edge[1]
+        local to = edge[2]
+        local id_from, port_from = parse_id(from)
+        local id_to, port_to = parse_id(to)
+        local ref_from = discover(id_from, layers, layer_repo)
+        local ref_to = discover(id_to, layers, layer_repo)
+        if ref_to.inputs[port_to] ~= nil then
+            nerv.error('%s has already been attached', to)
+        end
+        if ref_from.dim_out[port_from] ~= ref_to.dim_in[port_to] then
+            nerv.error('mismatching data dimension between %s and %s', from, to)
+        end
+        ref_from.outputs[port_from] = true
+        ref_to.inputs[port_to] = true
+    end
+
+    -- check dangling ports
+    for id, ref in pairs(layers) do
+        if id ~= '<input>' then
+            for i = 1, #ref.dim_in do
+                if ref.inputs[i] == nil then
+                    nerv.error('dangling input port %d of layer %s', i, id)
+                end
+            end
+            for i = 1, #ref.dim_out do
+                if ref.outputs[i] == nil then
+                    nerv.error('dangling output port %d of layer %s', i, id)
+                end
+            end
+            self.sublayer.layers[id] = ref.layer
+        end
+    end
+    for i = 1, #self.dim_in do
+        if layers['<input>'].outputs[i] == nil then
+            nerv.error('dangling port %d of layer <input>', i)
+        end
+    end
+    for i = 1, #self.dim_out do
+        if layers['<input>'].inputs[i] == nil then
+            nerv.error('dangling port %d of layer <output>', i)
+        end
+    end
+end
+
+function GraphLayer:set_attr(name, value)
+    self[name] = value
+    for id, layer in pairs(self.sublayer.layers) do
+        layer:set_attr(name, value)
+    end
+end
+
+function GraphLayer:get_sublayer(id)
+    return self.sublayer:get_layer(id)
+end
+
+function GraphLayer:get_params()
+    local param_repos = {}
+    for id, layer in pairs(self.sublayer.layers) do
+        table.insert(param_repos, layer:get_params())
+    end
+    return nerv.ParamRepo.merge(param_repos)
+end
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 54f33ae..5e3395c 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -70,6 +70,14 @@ function Layer:get_dim()
     return self.dim_in, self.dim_out
 end
 
+function Layer:set_attr(name, value)
+    self[name] = value
+end
+
+function Layer:get_sublayer(id)
+    nerv.error('primitive layer does not have sublayers')
+end
+
 function Layer:find_param(pid_list, lconf, gconf, p_type, p_dim)
     if type(pid_list) == "string" then
         pid_list = {pid_list}
@@ -101,6 +109,7 @@ function Layer:find_param(pid_list, lconf, gconf, p_type, p_dim)
     return p
 end
 
+nerv.include('graph.lua')
 nerv.include('affine.lua')
 nerv.include('sigmoid.lua')
 nerv.include('tanh.lua')
@@ -115,6 +124,7 @@ nerv.include('lstm.lua')
 nerv.include('lstm_gate.lua')
 nerv.include('dropout.lua')
 nerv.include('gru.lua')
+nerv.include('rnn.lua')
 
 -- The following lines are for backward compatibility, and will be removed in
 -- the future. The use of these names are deprecated.
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
new file mode 100644
index 0000000..a93530f
--- /dev/null
+++ b/nerv/layer/rnn.lua
@@ -0,0 +1,37 @@
+local RNNLayer = nerv.class('nerv.RNNLayer', 'nerv.GraphLayer')
+
+function RNNLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(1, 1)
+
+    local din = layer_conf.dim_in[1]
+    local dout = layer_conf.dim_out[1]
+
+    local pr = layer_conf.pr
+    if pr == nil then
+        pr = nerv.ParamRepo()
+    end
+
+    local layers = {
+        ['nerv.AffineLayer'] = {
+            main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
+        },
+        ['nerv.SigmoidLayer'] = {
+            sigmoid = {dim_in = {dout}, dim_out = {dout}},
+        },
+    }
+
+    local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
+
+    local connections = {
+        {'<input>[1]', 'main[1]', 0},
+        {'main[1]', 'sigmoid[1]', 0},
+        {'sigmoid[1]', 'main[2]', 1},
+        {'sigmoid[1]', '<output>[1]', 0},
+    }
+
+    self:graph_init(layer_repo, connections)
+end
diff --git a/nerv/main.lua b/nerv/main.lua
new file mode 100644
index 0000000..85e291c
--- /dev/null
+++ b/nerv/main.lua
@@ -0,0 +1,31 @@
+print 'Hello'
+
+local global_conf = {
+    cumat_type = nerv.CuMatrixFloat,
+    param_random = function() return 0 end,
+}
+
+local layer_repo = nerv.LayerRepo(
+    {
+        ['nerv.RNNLayer'] = {
+            rnn = {dim_in = {23}, dim_out = {26}},
+        },
+        ['nerv.AffineLayer'] = {
+            input = {dim_in = {20}, dim_out = {23}},
+            output = {dim_in = {26, 79}, dim_out = {79}},
+        },
+        ['nerv.SigmoidLayer'] = {
+            sigmoid = {dim_in = {23}, dim_out = {23}},
+        },
+    }, nerv.ParamRepo(), global_conf)
+
+local connections = {
+    {'<input>[1]', 'input[1]', 0},
+    {'input[1]', 'sigmoid[1]', 0},
+    {'sigmoid[1]', 'rnn[1]', 0},
+    {'rnn[1]', 'output[1]', 0},
+    {'output[1]', 'output[2]', 1},
+    {'output[1]', '<output>[1]', 0},
+}
+
+local network = nerv.GraphLayer('network', global_conf, {dim_in = {20}, dim_out = {79}, layer_repo = layer_repo, connections = connections})
diff --git a/nerv/nn/layer_repo.lua b/nerv/nn/layer_repo.lua
index 3d3a79f..a169b2b 100644
--- a/nerv/nn/layer_repo.lua
+++ b/nerv/nn/layer_repo.lua
@@ -12,20 +12,18 @@ function LayerRepo:add_layers(layer_spec, param_repo, global_conf)
         if layer_type == nil then
             nerv.error('layer type `%s` not found', ltype)
         end
-        for id, spec in pairs(llist) do
+        for id, layer_config in pairs(llist) do
             if layers[id] ~= nil then
                 nerv.error("a layer with id %s already exists", id)
             end
             nerv.info("create layer: %s", id)
-            if type(spec[2]) ~= "table" then
+            if type(layer_config) ~= "table" then
                 nerv.error("layer config table is need")
             end
-            layer_config = spec[2]
-            if type(spec[1]) ~= "table" then
-                nerv.error("parameter description table is needed")
-            end
-            for pname, pid in pairs(spec[1]) do
-                layer_config[pname] = param_repo:get_param(pid)
+            if type(layer_config.params) == "table" then
+                for pname, pid in pairs(layer_config.params) do
+                    layer_config[pname] = param_repo:get_param(pid)
+                end
             end
             if layer_config.pr == nil then
                 layer_config.pr = param_repo
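The nerv/nn/layer_repo.lua hunk changes the public layer_spec format: each entry used to be a pair {parameter binding table, config table}, and is now a single config table whose optional params field is resolved against the ParamRepo. A hedged before/after sketch; the ids, dimensions and parameter ids below are made up for illustration (ltp/bp stand for the affine layer's transform and bias parameters):

-- old format accepted by LayerRepo:add_layers
local old_spec = {
    ['nerv.AffineLayer'] = {
        affine0 = {{ltp = 'affine0_ltp', bp = 'affine0_bp'},
                   {dim_in = {429}, dim_out = {2048}}},
    },
}

-- new format: one table per layer; parameter bindings move into `params`
local new_spec = {
    ['nerv.AffineLayer'] = {
        affine0 = {dim_in = {429}, dim_out = {2048},
                   params = {ltp = 'affine0_ltp', bp = 'affine0_bp'}},
    },
}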
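A minimal usage sketch of the new GraphLayer API, not part of the commit above: it mirrors nerv/main.lua with a zero-initialised ParamRepo, but the sublayer ids ('proj', 'out'), the dimensions and the attribute name passed to set_attr are invented for illustration.

local global_conf = {
    cumat_type = nerv.CuMatrixFloat,
    param_random = function() return 0 end,
}

-- two plain affine sublayers registered in a LayerRepo
local repo = nerv.LayerRepo(
    {
        ['nerv.AffineLayer'] = {
            proj = {dim_in = {10}, dim_out = {20}},
            out = {dim_in = {20}, dim_out = {5}},
        },
    }, nerv.ParamRepo(), global_conf)

-- every connection is {from, to, time}; ports are written "id[port]" and
-- <input>/<output> denote the graph's own input and output ports
local graph = nerv.GraphLayer('graph', global_conf,
    {
        dim_in = {10}, dim_out = {5},
        layer_repo = repo,
        connections = {
            {'<input>[1]', 'proj[1]', 0},
            {'proj[1]', 'out[1]', 0},
            {'out[1]', '<output>[1]', 0},
        },
    })

local proj = graph:get_sublayer('proj')    -- look up a sublayer by id
local params = graph:get_params()          -- merged ParamRepo of all sublayers
graph:set_attr('some_attr', 1)             -- propagated to every sublayer (attribute name is arbitrary)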
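The third element of each connection triple appears to be a time shift: judging from nerv/main.lua, 0 marks an ordinary edge and 1 marks a recurrent edge that feeds the previous step's output back in (graph_init itself does not consume this field yet). A sketch of the RNNLayer built on top of this, again with invented dimensions and id:

local global_conf = {
    cumat_type = nerv.CuMatrixFloat,
    param_random = function() return 0 end,
}

-- one input port, one output port; internally this expands to
-- <input> -> main (affine) -> sigmoid -> <output>, with the sigmoid output
-- also fed back into main's second input port on the delayed edge
local rnn = nerv.RNNLayer('rnn', global_conf,
                          {dim_in = {32}, dim_out = {64}, pr = nerv.ParamRepo()})

local affine = rnn:get_sublayer('main')   -- the recurrent affine sublayer
local params = rnn:get_params()           -- parameters of the internal affine layer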