author     Determinant <[email protected]>  2016-04-28 14:34:56 +0800
committer  Determinant <[email protected]>  2016-04-28 14:34:56 +0800
commit     19abbff464f6492b5c7a9b57f94c48f690b7d4d8 (patch)
tree       c8f02867041100f4b2b21f4a0f3c73333343e348
parent     b41943a5e6d006d364615df2ed77e8eae6286f3c (diff)
add doc for repo
-rw-r--r--  nerv/nn/layer_repo.lua  131
-rw-r--r--  nerv/nn/param_repo.lua   63
2 files changed, 188 insertions(+), 6 deletions(-)
diff --git a/nerv/nn/layer_repo.lua b/nerv/nn/layer_repo.lua
index 647aac9..35ac104 100644
--- a/nerv/nn/layer_repo.lua
+++ b/nerv/nn/layer_repo.lua
@@ -1,10 +1,136 @@
+--- Implements a repository that stores a collection of layers.
+
+--- The class for storing a collection of layers.
+-- @type nerv.LayerRepo
+
local LayerRepo = nerv.class("nerv.LayerRepo")
+--- The constructor.
+-- @param layer_spec the *layer specification*, a declarative way of creating layers in the collection. The layer specification is structured as follows:
+--
+-- {
+-- [<layer_typename1>] =
+-- {
+-- <layer_id1> = <layer_conf1>,
+-- <layer_id2> = <layer_conf2>,
+-- <layer_id3> = <layer_conf3>,
+-- ...
+-- },
+-- [<layer_typename2>] =
+-- {
+-- ...
+-- },
+-- ...
+-- }
+--
+-- In short, the specification is a table containing pairs of a layer type
+-- name string (such as `"nerv.AffineLayer"`) and a table that maps layer
+-- identifiers to layer configs (`layer_conf`). For `layer_conf`, see
+-- `nerv.Layer.__init` and the `__init` doc of the individual layer type.
+--
+-- Here is an example:
+--
+-- {
+-- ["nerv.AffineLayer"] =
+-- {
+-- affine0 = {dim_in = {429}, dim_out = {2048},
+-- params = {ltp = "affine0_ltp", bp = "affine0_bp"}},
+-- affine1 = {dim_in = {2048}, dim_out = {2048},
+-- params = {ltp = "affine1_ltp", bp = "affine1_bp"}},
+-- affine2 = {dim_in = {2048}, dim_out = {2048},
+-- params = {ltp = "affine2_ltp", bp = "affine2_bp"}},
+-- affine3 = {dim_in = {2048}, dim_out = {2048},
+-- params = {ltp = "affine3_ltp", bp = "affine3_bp"}},
+-- affine4 = {dim_in = {2048}, dim_out = {2048},
+-- params = {ltp = "affine4_ltp", bp = "affine4_bp"}},
+-- affine5 = {dim_in = {2048}, dim_out = {2048},
+-- params = {ltp = "affine5_ltp", bp = "affine5_bp"}},
+-- affine6 = {dim_in = {2048}, dim_out = {2048},
+-- params = {ltp = "affine6_ltp", bp = "affine6_bp"}},
+-- affine7 = {dim_in = {2048}, dim_out = {3001},
+-- params = {ltp = "affine7_ltp", bp = "affine7_bp"}}
+-- },
+-- ["nerv.SigmoidLayer"] =
+-- {
+-- sigmoid0 = {dim_in = {2048}, dim_out = {2048}},
+-- sigmoid1 = {dim_in = {2048}, dim_out = {2048}},
+-- sigmoid2 = {dim_in = {2048}, dim_out = {2048}},
+-- sigmoid3 = {dim_in = {2048}, dim_out = {2048}},
+-- sigmoid4 = {dim_in = {2048}, dim_out = {2048}},
+-- sigmoid5 = {dim_in = {2048}, dim_out = {2048}},
+-- sigmoid6 = {dim_in = {2048}, dim_out = {2048}}
+-- },
+-- ["nerv.SoftmaxCELayer"] = -- softmax + ce criterion layer for finetune output
+-- {
+-- ce_crit = {dim_in = {3001, 1}, dim_out = {1}, compressed = true}
+-- },
+-- ["nerv.SoftmaxLayer"] = -- softmax for decode output
+-- {
+-- softmax = {dim_in = {3001}, dim_out = {3001}}
+-- }
+-- }
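+--
+-- Given such a specification, the repo can be constructed as follows (a
+-- minimal sketch, assuming `gconf` is the global config table and
+-- `param_repo` already holds the referenced parameters):
+--
+--     local layer_repo = nerv.LayerRepo(layer_spec, param_repo, gconf)
+--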
+-- @param param_repo the default parameter repo used for binding parameters
+-- when a layer does not specify `pr` in its layer config `layer_conf`
+-- @param global_conf a table describing the computation state and providing
+-- some global settings
+
function LayerRepo:__init(layer_spec, param_repo, global_conf)
self.layers = {}
self:add_layers(layer_spec, param_repo, global_conf)
end
+--- Add more layers to the collection (repo).
+-- @param layer_spec the *layer specification* describing the layers to be
+-- added, in the same format as in `__init`
+-- @param param_repo the default parameter repo used for binding parameters
+-- when a layer does not specify `pr` in its layer config `layer_conf`
+-- @param global_conf a table describing the computation state and providing
+-- some global settings
+--
+-- Here is an example of adding graph layers, built on the previous example:
+--
+-- layer_repo:add_layers(
+-- {
+-- ["nerv.GraphLayer"] =
+-- {
+-- global_transf = {
+-- dim_in = {429}, dim_out = {429},
+-- layer_repo = layer_repo,
+-- connections = {
+-- {"<input>[1]", "blayer1[1]", 0},
+-- {"blayer1[1]", "wlayer1[1]", 0},
+-- {"wlayer1[1]", "blayer2[1]", 0},
+-- {"blayer2[1]", "wlayer2[1]", 0},
+-- {"wlayer2[1]", "<output>[1]", 0}
+-- }
+-- },
+-- main = {
+-- dim_in = {429}, dim_out = {3001},
+-- layer_repo = layer_repo,
+-- connections = {
+-- {"<input>[1]", "affine0[1]", 0},
+-- {"affine0[1]", "sigmoid0[1]", 0},
+-- {"sigmoid0[1]", "affine1[1]", 0},
+-- {"affine1[1]", "sigmoid1[1]", 0},
+-- {"sigmoid1[1]", "affine2[1]", 0},
+-- {"affine2[1]", "sigmoid2[1]", 0},
+-- {"sigmoid2[1]", "affine3[1]", 0},
+-- {"affine3[1]", "sigmoid3[1]", 0},
+-- {"sigmoid3[1]", "affine4[1]", 0},
+-- {"affine4[1]", "sigmoid4[1]", 0},
+-- {"sigmoid4[1]", "affine5[1]", 0},
+-- {"affine5[1]", "sigmoid5[1]", 0},
+-- {"sigmoid5[1]", "affine6[1]", 0},
+-- {"affine6[1]", "sigmoid6[1]", 0},
+-- {"sigmoid6[1]", "affine7[1]", 0},
+-- {"affine7[1]", "<output>[1]", 0}
+-- }
+-- }
+-- }
+-- }, param_repo, gconf)
+--
+-- To fully understand the example, see the doc for `nerv.GraphLayer`, and
+-- note that `layer_repo` itself is passed in the graph layer configs because
+-- primitive layers such as `"affine0"` were already created from the layer
+-- specification during construction (see the example in `__init`).
+
function LayerRepo:add_layers(layer_spec, param_repo, global_conf)
local layers = self.layers
for ltype, llist in pairs(layer_spec) do
@@ -28,6 +154,9 @@ function LayerRepo:add_layers(layer_spec, param_repo, global_conf)
end
end
+--- Rebind the parameters of all layers in the collection.
+-- @param param_repo the new parameter repo used for parameter rebinding
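+--
+-- A sketch of a typical use (assuming `gconf` exists and "updated.nerv" is a
+-- hypothetical chunk file holding the new parameters):
+--
+--     local new_pr = nerv.ParamRepo(nil, nerv.ParamRepo.LOC_TYPES.ON_HOST)
+--     new_pr:import({"updated.nerv"}, gconf)
+--     layer_repo:rebind(new_pr)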
+
function LayerRepo:rebind(param_repo)
if self.__rebinding then
return
@@ -46,6 +175,8 @@ function LayerRepo:rebind(param_repo)
self.__rebinding = false
end
+--- Get a layer from the collection (repo) by its identifier.
+-- @param lid the layer id
+-- @return the layer with the given id (raises an error if it does not exist)
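+--
+-- For example (assuming the repo was built from the spec shown in `__init`):
+--
+--     local affine0 = layer_repo:get_layer("affine0")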
function LayerRepo:get_layer(lid)
local layer = self.layers[lid]
if layer == nil then
diff --git a/nerv/nn/param_repo.lua b/nerv/nn/param_repo.lua
index 932ed2a..a9eb0bd 100644
--- a/nerv/nn/param_repo.lua
+++ b/nerv/nn/param_repo.lua
@@ -1,10 +1,22 @@
+--- Implements a repository that stores a collection of parameter groups.
+
+--- The class for storing a collection of parameter groups (`nerv.Param`).
+-- @type nerv.ParamRepo
+
local ParamRepo = nerv.class("nerv.ParamRepo")
+--- The location constants for `loc_type`.
+-- @field ON_DEVICE the storage is on device (GPU RAM)
+-- @field ON_HOST the storage is on host (main RAM)
+
ParamRepo.LOC_TYPES = {
ON_DEVICE = {},
ON_HOST = {}
}
+--- The constructor.
+-- @param plist an array of parameters to be initially stored in the collection
+-- @param loc_type the type of storage location, see `nerv.ParamRepo.LOC_TYPES`
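+--
+-- A minimal sketch (`affine0_ltp` stands for a hypothetical, already loaded
+-- `nerv.Param`):
+--
+--     local pr = nerv.ParamRepo({affine0_ltp}, nerv.ParamRepo.LOC_TYPES.ON_HOST)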
+
function ParamRepo:__init(plist, loc_type)
self.params = {}
self.loc_type = loc_type or ParamRepo.LOC_TYPES.ON_HOST
@@ -37,6 +49,9 @@ function ParamRepo:__init(plist, loc_type)
end
end
+--- Add a parameter to the collection.
+-- @param p the parameter to be added
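+--
+-- For example (`affine0_ltp` as in the constructor sketch):
+--
+--     pr:add(affine0_ltp) -- raises an error on a duplicate parameter id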
+
function ParamRepo:add(p)
if self.params[p.id] ~= nil then
nerv.error("duplicate params with the same id: %s", p.id)
@@ -45,6 +60,9 @@ function ParamRepo:add(p)
self.params[p.id] = p
end
+--- Remove a parameter from the collection.
+-- @param pid the id of the parameter to be removed
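+--
+-- For example:
+--
+--     pr:remove("affine0_ltp") -- raises an error if no such parameter exists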
+
function ParamRepo:remove(pid)
if self.params[pid] == nil then
nerv.error("param %s does not exit", pid)
@@ -52,7 +70,16 @@ function ParamRepo:remove(pid)
self.params[pid] = nil
end
+--- Merge two or more parameter collections.
+-- @param repos an array of parameter repos to be merged
+-- @param loc_type the type of storage location, see `nerv.ParamRepo.LOC_TYPES`
+-- @return the merged parameter collection (repo)
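+--
+-- A sketch (assuming `pr1` and `pr2` are existing host-side repos):
+--
+--     local merged = nerv.ParamRepo.merge({pr1, pr2},
+--                                         nerv.ParamRepo.LOC_TYPES.ON_HOST)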
+
function ParamRepo.merge(repos, loc_type)
+
+-- TODO: remove the redundant `loc_type` and check that `loc_type` is
+-- consistent across the param repos being merged.
+
local self = nerv.ParamRepo(nil, loc_type)
for i, repo in ipairs(repos) do
if not nerv.is_type(repo, "nerv.ParamRepo") then
@@ -65,6 +92,12 @@ function ParamRepo.merge(repos, loc_type)
return self
end
+--- Import parameters from NERV chunk files.
+-- @param param_files an array of filenames of the chunk files to load parameters from
+-- @param gconf a table describing the computation state and providing
+-- some global settings
+-- @param pids optional, an array of identifiers of the parameters to be imported
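+--
+-- A sketch (assuming `gconf` exists and "final.nerv" is a hypothetical chunk
+-- file on disk):
+--
+--     pr:import({"final.nerv"}, gconf)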
+
function ParamRepo:import(param_files, gconf, pids)
if type(param_files) ~= "table" then
nerv.error("param file table is need")
@@ -83,24 +116,36 @@ function ParamRepo:import(param_files, gconf, pids)
end
end
+--- Export the parameter collection to a NERV chunk file.
+-- @param param_file the output filename
+-- @param pids optional, an array of identifiers of the parameters to be exported
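+--
+-- For example, exporting two parameters (the ids are assumed to exist in the
+-- repo):
+--
+--     pr:export("exported.nerv", {"affine0_ltp", "affine0_bp"})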
+
function ParamRepo:export(param_file, pids)
local cf = nerv.ChunkFile(param_file, "w")
-if pids == nil then
- for id, p in pairs(self.params) do
- cf:write_chunk(p)
- end
-else
- for i, pid in ipairs(pids) do
+ if pids == nil then
+ for id, p in pairs(self.params) do
+ cf:write_chunk(p)
+ end
+ else
+ for i, pid in ipairs(pids) do
cf:write_chunk(self:get_param(pid))
end
end
cf:close()
end
+--- Test whether the collection has a parameter.
+-- @param pid the identifier to be tested
+-- @return true if a parameter with the identifier exists
+
function ParamRepo:has_param(pid)
return self.params[pid] ~= nil
end
+--- Retrieve a parameter by its identifier.
+-- @param pid the identifier of the parameter to be retrieved
+-- @return the retrieved parameter (raises an error if it does not exist)
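+--
+-- For example:
+--
+--     if pr:has_param("affine0_ltp") then
+--         local ltp = pr:get_param("affine0_ltp")
+--     end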
+
function ParamRepo:get_param(pid)
local p = self.params[pid]
if p == nil then
@@ -109,6 +154,12 @@ function ParamRepo:get_param(pid)
return p
end
+--- Create a copy of the current collection.
+-- @param loc_type the storage location of the new copy
+-- @param gconf a table describing the computation state and providing
+-- some global settings
+-- @param pids optional, an array of identifiers of the parameters to be copied
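+--
+-- A sketch (assuming `gconf` exists) that copies the whole collection to the
+-- device:
+--
+--     local device_pr = pr:copy(nerv.ParamRepo.LOC_TYPES.ON_DEVICE, gconf)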
+
function ParamRepo:copy(loc_type, gconf, pids)
local copier
local target = nerv.ParamRepo(nil, loc_type)