author     Determinant <[email protected]>  2015-06-22 19:01:29 +0800
committer  Determinant <[email protected]>  2015-06-22 19:01:29 +0800
commit     2497fd9e7a0fae5ee4887890d7a312e0e08a93b8 (patch)
tree       382f97575bd2df9ee6abb1662b11b279fc22d72b /nn
parent     196e9b48a3541caccdffc5743001cced70667091 (diff)
major change: use luarocks to manage project
Diffstat (limited to 'nn')
-rw-r--r--  nn/init.lua         3
-rw-r--r--  nn/layer_dag.lua    249
-rw-r--r--  nn/layer_repo.lua   34
-rw-r--r--  nn/param_repo.lua   76
4 files changed, 0 insertions, 362 deletions
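The commit message only states that the project moves to LuaRocks; the rockspec itself is not part of this diff, and the nn/ sources below are simply removed. As a rough, hypothetical sketch of what a LuaRocks "builtin" module list for these files could look like (package name, version, source URL, module names, and whether these exact paths survived the reorganization are all assumptions, not taken from this commit):

    -- hypothetical rockspec fragment; every name and path here is illustrative only
    package = "nerv"
    version = "scm-1"
    source  = { url = "git://..." }        -- placeholder, not from this commit
    build   = {
        type = "builtin",
        modules = {
            ["nerv.layer_repo"] = "nn/layer_repo.lua",
            ["nerv.param_repo"] = "nn/param_repo.lua",
            ["nerv.layer_dag"]  = "nn/layer_dag.lua",
        },
    }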
diff --git a/nn/init.lua b/nn/init.lua
deleted file mode 100644
index 1bafa77..0000000
--- a/nn/init.lua
+++ /dev/null
@@ -1,3 +0,0 @@
-require 'nn.layer_repo'
-require 'nn.param_repo'
-require 'nn.layer_dag'
diff --git a/nn/layer_dag.lua b/nn/layer_dag.lua
deleted file mode 100644
index 8e30216..0000000
--- a/nn/layer_dag.lua
+++ /dev/null
@@ -1,249 +0,0 @@
-local DAGLayer = nerv.class("nerv.DAGLayer", "nerv.Layer")
-
-local function parse_id(str)
- local id, port, _
- _, _, id, port = string.find(str, "([a-zA-Z0-9_]+)%[([0-9]+)%]")
- if id == nil or port == nil then
- _, _, id, port = string.find(str, "(.+)%[([0-9]+)%]")
- if not (id == "<input>" or id == "<output>") then
- nerv.error("wrong format of connection id")
- end
- end
- port = tonumber(port)
- return id, port
-end
-
-local function discover(id, layers, layer_repo)
- local ref = layers[id]
- if id == "<input>" or id == "<output>" then
- return nil
- end
- if ref == nil then
- local layer = layer_repo:get_layer(id)
- local dim_in, dim_out = layer:get_dim()
- ref = {
- layer = layer,
- inputs = {},
- outputs = {},
- err_inputs = {},
- err_outputs = {},
- next_layers = {},
- input_len = #dim_in,
- output_len = #dim_out,
- in_deg = 0,
- visited = false
- }
- layers[id] = ref
- end
- return ref
-end
-
-function DAGLayer:__init(id, global_conf, layer_conf)
- local layers = {}
- local inputs = {}
- local outputs = {}
- local dim_in = layer_conf.dim_in
- local dim_out = layer_conf.dim_out
- local parsed_conn = {}
- for from, to in pairs(layer_conf.connections) do
- local id_from, port_from = parse_id(from)
- local id_to, port_to = parse_id(to)
- local ref_from = discover(id_from, layers, layer_conf.sub_layers)
- local ref_to = discover(id_to, layers, layer_conf.sub_layers)
- local input_dim, output_dim, _
- if ref_from and ref_from.outputs[port_from] ~= nil then
- nerv.error("%s has already been attached", from)
- end
- if ref_to and ref_to.inputs[port_to] ~= nil then
- nerv.error("%s has already been attached", to)
- end
- if id_from == "<input>" then
- input_dim, _ = ref_to.layer:get_dim()
- if dim_in[port_from] ~= input_dim[port_to] then
- nerv.error("mismatching data dimension between %s and %s", from, to)
- end
- inputs[port_from] = {ref_to, port_to}
-            ref_to.inputs[port_to] = inputs -- just a placeholder
- elseif id_to == "<output>" then
- _, output_dim = ref_from.layer:get_dim()
- if output_dim[port_from] ~= dim_out[port_to] then
- nerv.error("mismatching data dimension between %s and %s", from, to)
- end
- outputs[port_to] = {ref_from, port_from}
-            ref_from.outputs[port_from] = outputs -- just a placeholder
- else
- _, output_dim = ref_from.layer:get_dim()
- input_dim, _ = ref_to.layer:get_dim()
- if output_dim[port_from] ~= input_dim[port_to] then
- nerv.error("mismatching data dimension between %s and %s", from, to)
- end
-
- table.insert(parsed_conn,
- {{ref_from, port_from}, {ref_to, port_to}})
- table.insert(ref_from.next_layers, ref_to) -- add edge
- ref_to.in_deg = ref_to.in_deg + 1 -- increase the in-degree of the target layer
- end
- end
-
-    -- topological sort
- local queue = {}
- local l = 1
- local r = 1
- for id, ref in pairs(layers) do
- if ref.in_deg == 0 then
- table.insert(queue, ref)
- nerv.info("adding source layer: %s", id)
- r = r + 1
- end
- end
- if l == r then
- nerv.error("loop detected")
- end
- while l < r do
- local cur = queue[l]
- cur.visited = true
- l = l + 1
- for _, nl in pairs(cur.next_layers) do
- nl.in_deg = nl.in_deg - 1
- if nl.in_deg == 0 then
- table.insert(queue, nl)
- r = r + 1
- end
- end
- end
- for i = 1, #queue do
- nerv.info("enqueued layer: %s", queue[i].layer.id)
- end
-
- for id, ref in pairs(layers) do
-        -- check whether the graph is connected
- if ref.visited == false then
- nerv.warning("layer %s is ignored", id)
- end
- end
-
- self.layers = layers
- self.inputs = inputs
- self.outputs = outputs
- self.dim_in = dim_in
- self.dim_out = dim_out
- self.parsed_conn = parsed_conn
- self.queue = queue
- self.gconf = global_conf
-end
-
-function DAGLayer:init(batch_size)
- for i, conn in ipairs(self.parsed_conn) do
- local _, output_dim
- local ref_from, port_from, ref_to, port_to
- ref_from, port_from = unpack(conn[1])
- ref_to, port_to = unpack(conn[2])
- _, output_dim = ref_from.layer:get_dim()
- local mid = self.gconf.cumat_type(batch_size,
- output_dim[port_from])
- local err_mid = mid:create()
-
- ref_from.outputs[port_from] = mid
- ref_to.inputs[port_to] = mid
-
- ref_from.err_inputs[port_from] = err_mid
- ref_to.err_outputs[port_to] = err_mid
- end
- for id, ref in pairs(self.layers) do
- for i = 1, ref.input_len do
- if ref.inputs[i] == nil then
- nerv.error("dangling input port %d of layer %s", i, id)
- end
- end
- for i = 1, ref.output_len do
- if ref.outputs[i] == nil then
- nerv.error("dangling output port %d of layer %s", i, id)
- end
- end
- -- initialize sub layers
- ref.layer:init(batch_size)
- end
- for i = 1, #self.dim_in do
- if self.inputs[i] == nil then
- nerv.error("dangling port %d of layer <input>", i)
- end
- end
- for i = 1, #self.dim_out do
- if self.outputs[i] == nil then
- nerv.error("dangling port %d of layer <output>", i)
- end
- end
-end
-
-function DAGLayer:set_inputs(input)
- for i = 1, #self.dim_in do
- local layer = self.inputs[i][1]
- local port = self.inputs[i][2]
- layer.inputs[port] = input[i]
- end
-end
-
-function DAGLayer:set_outputs(output)
- for i = 1, #self.dim_out do
- local layer = self.outputs[i][1]
- local port = self.outputs[i][2]
- layer.outputs[port] = output[i]
- end
-end
-
-function DAGLayer:set_err_inputs(bp_err)
- for i = 1, #self.dim_out do
- local layer = self.outputs[i][1]
- local port = self.outputs[i][2]
- layer.err_inputs[port] = bp_err[i]
- end
-end
-
-function DAGLayer:set_err_outputs(next_bp_err)
- for i = 1, #self.dim_in do
- local layer = self.inputs[i][1]
- local port = self.inputs[i][2]
- layer.err_outputs[port] = next_bp_err[i]
- end
-end
-
-function DAGLayer:update(bp_err, input, output)
- self:set_err_inputs(bp_err)
- self:set_inputs(input)
- self:set_outputs(output)
- -- print("update")
-    for _, ref in ipairs(self.queue) do
- -- print(ref.layer.id)
- ref.layer:update(ref.err_inputs, ref.inputs, ref.outputs)
- end
-end
-
-function DAGLayer:propagate(input, output)
- self:set_inputs(input)
- self:set_outputs(output)
- for i = 1, #self.queue do
- local ref = self.queue[i]
- -- print(ref.layer.id)
- ref.layer:propagate(ref.inputs, ref.outputs)
- end
-end
-
-function DAGLayer:back_propagate(bp_err, next_bp_err, input, output)
- self:set_err_outputs(next_bp_err)
- self:set_err_inputs(bp_err)
- self:set_inputs(input)
- self:set_outputs(output)
- for i = #self.queue, 1, -1 do
- local ref = self.queue[i]
- -- print(ref.layer.id)
- ref.layer:back_propagate(ref.err_inputs, ref.err_outputs, ref.inputs, ref.outputs)
- end
-end
-
-function DAGLayer:get_params()
- local param_repos = {}
-    for _, ref in ipairs(self.queue) do
- table.insert(param_repos, ref.layer:get_params())
- end
- return nerv.ParamRepo.merge(param_repos)
-end
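For reference, the removed DAGLayer wires sub-layers together from connection strings of the form id[port], with the reserved ids <input> and <output>, checks dimensions per port, and runs the sub-layers in topologically sorted order. A minimal usage sketch inferred from the removed code follows; the layer ids, the layer types behind them, and all dimensions are invented for illustration:

    -- hypothetical DAGLayer usage; "affine0"/"sigmoid0" and the dimensions are assumptions
    local dag = nerv.DAGLayer("dag", gconf,
        {
            dim_in  = {429}, dim_out = {2048},
            sub_layers = layer_repo,                 -- a nerv.LayerRepo that can resolve "affine0" and "sigmoid0"
            connections = {
                ["<input>[1]"]  = "affine0[1]",      -- DAG input port 1 feeds affine0 input port 1
                ["affine0[1]"]  = "sigmoid0[1]",     -- affine0 output port 1 feeds sigmoid0 input port 1
                ["sigmoid0[1]"] = "<output>[1]",     -- sigmoid0 output port 1 drives DAG output port 1
            },
        })
    dag:init(batch_size)            -- allocates intermediate matrices and checks for dangling ports
    dag:propagate(input, output)    -- input/output are tables of matrices, one per declared port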
diff --git a/nn/layer_repo.lua b/nn/layer_repo.lua
deleted file mode 100644
index 602c37c..0000000
--- a/nn/layer_repo.lua
+++ /dev/null
@@ -1,34 +0,0 @@
-local LayerRepo = nerv.class("nerv.LayerRepo")
-
-function LayerRepo:__init(layer_spec, param_repo, global_conf)
- local layers = {}
- for ltype, llist in pairs(layer_spec) do
- local layer_type = nerv.get_type(ltype)
- for id, spec in pairs(llist) do
- if layers[id] ~= nil then
- nerv.error("a layer with id %s already exists", id)
- end
- nerv.info("create layer: %s", id)
- if type(spec[2]) ~= "table" then
- nerv.error("layer config table is need")
- end
-            local layer_config = spec[2]
- if type(spec[1]) ~= "table" then
- nerv.error("parameter description table is needed")
- end
- for pname, pid in pairs(spec[1]) do
- layer_config[pname] = param_repo:get_param(pid)
- end
- layers[id] = layer_type(id, global_conf, layer_config)
- end
- end
- self.layers = layers
-end
-
-function LayerRepo:get_layer(lid)
- local layer = self.layers[lid]
- if layer == nil then
- nerv.error("layer with id %s not found", lid)
- end
- return layer
-end
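For reference, the removed LayerRepo builds layers from a spec grouped by layer type name; each entry maps a layer id to a pair whose first element maps parameter names to parameter ids (resolved through the ParamRepo) and whose second element is the layer's config table. A sketch of that shape, with invented type names, ids, parameter names, and dimensions:

    -- hypothetical layer_spec; the type names, ids, parameter names, and dimensions are illustrative
    local layer_repo = nerv.LayerRepo(
        {
            ["nerv.AffineLayer"] =
            {
                affine0 = {{ltp = "affine0_ltp", bp = "affine0_bp"},   -- spec[1]: param name -> param id
                           {dim_in = {429}, dim_out = {2048}}},        -- spec[2]: layer config table
            },
            ["nerv.SigmoidLayer"] =
            {
                sigmoid0 = {{}, {dim_in = {2048}, dim_out = {2048}}},  -- no parameters to resolve
            },
        }, param_repo, gconf)
    local affine0 = layer_repo:get_layer("affine0")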
diff --git a/nn/param_repo.lua b/nn/param_repo.lua
deleted file mode 100644
index ab971ba..0000000
--- a/nn/param_repo.lua
+++ /dev/null
@@ -1,76 +0,0 @@
-local ParamRepo = nerv.class("nerv.ParamRepo")
-function ParamRepo:__init(plist)
- self.params = {}
- if plist ~= nil then
- for i, p in ipairs(plist) do
- self.params[p.id] = p
- end
- end
-end
-
-function ParamRepo:add(pid, p)
- if self.params[pid] ~= nil then
- nerv.error("duplicate params with the same id: %s", pid)
- end
- self.params[pid] = p
-end
-
-function ParamRepo:remove(pid)
-    if self.params[pid] == nil then
-        nerv.error("param %s does not exist", pid)
-    end
-    self.params[pid] = nil -- params is keyed by id string, so just clear the slot
-end
-
-function ParamRepo.merge(repos)
- local self = nerv.ParamRepo()
- for i, repo in ipairs(repos) do
- if not nerv.is_type(repo, "nerv.ParamRepo") then
- nerv.error("nerv.ParamRepo objects expected, got %s", repo)
- end
- for pid, p in pairs(repo.params) do
- self:add(pid, p)
- end
- end
- return self
-end
-
-function ParamRepo:import(param_files, pids, gconf)
- if type(param_files) ~= "table" then
- nerv.error("param file table is need")
- end
- for i = 1, #param_files do
- local pf = nerv.ChunkFile(param_files[i], "r")
- for cid, cspec in pairs(pf.metadata) do
- if pids == nil or pids[cid] ~= nil then
- local p = pf:read_chunk(cid, gconf)
- if not nerv.is_type(p, "nerv.Param") then
- nerv.error("param chunk is expected")
- end
- self:add(cid, p)
- end
- end
- end
-end
-
-function ParamRepo:export(param_file, pids)
-    local cf = nerv.ChunkFile(param_file, "w")
- if pids == nil then
- for id, p in pairs(self.params) do
- cf:write_chunk(p)
- end
- else
- for i, pid in ipairs(pids) do
- cf:write_chunk(self:get_param(pid))
- end
- end
- cf:close()
-end
-
-function ParamRepo:get_param(pid)
- local p = self.params[pid]
- if p == nil then
- nerv.error("param with id %s not found", pid)
- end
- return p
-end
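Finally, the removed ParamRepo is a flat id -> nerv.Param table with import/export through nerv.ChunkFile and a static merge. A usage sketch under the same conventions; the file names and parameter ids below are placeholders:

    -- hypothetical ParamRepo round trip; file names and parameter ids are placeholders
    local repo = nerv.ParamRepo()
    repo:import({"global_transf.nerv", "wb.nerv"}, nil, gconf)  -- nil pids: load every chunk as a param
    local ltp = repo:get_param("affine0_ltp")                   -- look a parameter up by id
    local merged = nerv.ParamRepo.merge({repo, other_repo})     -- other_repo: another nerv.ParamRepo; duplicate ids raise an error
    merged:export("all_params.nerv", nil)                       -- nil pids: write every parameter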