path: root/nerv/layer/affine.lua
Diffstat (limited to 'nerv/layer/affine.lua')
-rw-r--r--  nerv/layer/affine.lua  39
1 file changed, 33 insertions(+), 6 deletions(-)
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index a1c92b1..2dd2dc0 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -1,24 +1,38 @@
---- Parameter and layer classes related to linear transform.
+--- Contains parameter and layer classes related to the linear (or affine)
+-- transform.
+
+--- The class for the linear transform parameter.
+-- @type nerv.LinearTransParam
-local MatrixParam = nerv.class('nerv.MatrixParam', 'nerv.Param')
local LinearTransParam = nerv.class('nerv.LinearTransParam', 'nerv.MatrixParam')
+
+--- The class for the bias parameter (currently implemented as a one-row matrix).
+-- @type nerv.BiasParam
+
local BiasParam = nerv.class('nerv.BiasParam', 'nerv.MatrixParam')
-local AffineLayer = nerv.class('nerv.AffineLayer', 'nerv.Layer')
---- A parameter that consists of a single matrix
+--- The class for all matrix-based parameters. The class has a single matrix
+-- which can be accessed via `self.trans`.
-- @type nerv.MatrixParam
+local MatrixParam = nerv.class('nerv.MatrixParam', 'nerv.Param')
+
+--- Check the storage location of the contained matrix. This function is
+-- required by `nerv.ParamRepo`.
+-- @param checker the callback function for checking
function MatrixParam:check(checker)
-- check trans matrix type
checker(self.trans)
end
---- Read from a file handle.
+--- Read from a file handle. See `nerv.Param.read`.
-- @param handle the file handle
function MatrixParam:read(handle)
self.trans = self.gconf.mmat_type.load(handle)
end
+--- Write to a file handle. See `nerv.Param.write`.
+-- @param handle the file handle
function MatrixParam:write(handle)
self.trans:save(handle)
end
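
For illustration only (not part of the patch), here is a minimal Lua sketch of how a matrix-based parameter documented above might be written, reloaded and checked. It assumes nerv's chunk-file I/O (`nerv.ChunkFile` with `write_chunk`/`read_chunk`) and the host matrix type `nerv.MMatrixFloat`; the identifier "affine_ltp", the file name and the dimensions are made up.

-- sketch: save a parameter, reload it, then check the wrapped matrix
local gconf = {mmat_type = nerv.MMatrixFloat}          -- host (CPU) matrices;
                                                       -- a real global_conf carries more fields

local ltp = nerv.LinearTransParam("affine_ltp", gconf) -- hypothetical id
ltp.trans = gconf.mmat_type(429, 2048)                 -- the single wrapped matrix

local cf = nerv.ChunkFile("params.nerv", "w")          -- assumed chunk-file API
cf:write_chunk(ltp)                                    -- dispatches to MatrixParam:write
cf:close()

local cf2 = nerv.ChunkFile("params.nerv", "r")
local loaded = cf2:read_chunk("affine_ltp", gconf)     -- dispatches to MatrixParam:read

-- MatrixParam:check simply forwards the wrapped matrix to the callback
loaded:check(function (m)
    nerv.info("parameter matrix is %d x %d", m:nrow(), m:ncol())
end)
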
@@ -69,10 +83,23 @@ function MatrixParam:update_by_err_input()
self:_update(l2, l2)
end
---- A fully-connected linear transform layer.
+--- The affine layer that performs the calculation Wx + b, also known as the
+-- fully-connected linear transform layer.
-- @type nerv.AffineLayer
+local AffineLayer = nerv.class('nerv.AffineLayer', 'nerv.Layer')
+
--- The constructor.
+-- @param id the identifier
+-- @param global_conf see `self.gconf` of `nerv.Layer.__init`
+-- @param layer_conf a table providing settings dedicated to the layer; for
+-- `layer_conf` fields that are shared by all layers, see
+-- `nerv.Layer.__init`. The affine layer requires parameters to be bound; the
+-- following parameter names will be looked up while binding:
+--
+-- * `ltp`: the linear transformation parameter, also known as the weight matrix, W in Wx + b
+-- * `bp`: the bias parameter, also known as the bias matrix, b in Wx + b
+
function AffineLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
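
To complement the constructor documentation above, the following sketch (again not part of the patch) constructs an affine layer with its `ltp` and `bp` parameters. The `dim_in`/`dim_out` fields match the `check_dim_len` call, but handing the parameters over directly inside `layer_conf` is an assumption; depending on the nerv version, binding may instead go through a `nerv.ParamRepo` passed in the configuration.

-- sketch: a 429 -> 2048 affine layer computing x * W + b
local gconf = {mmat_type = nerv.MMatrixFloat}

local ltp = nerv.LinearTransParam("affine_ltp", gconf)
ltp.trans = gconf.mmat_type(429, 2048)                 -- W: dim_in x dim_out
local bp = nerv.BiasParam("affine_bp", gconf)
bp.trans = gconf.mmat_type(1, 2048)                    -- b: one-row matrix

-- passing `ltp`/`bp` directly in layer_conf is assumed here; the names are
-- the ones the layer looks up while binding its parameters
local affine = nerv.AffineLayer("affine0", gconf, {
        dim_in = {429}, dim_out = {2048},
        ltp = ltp, bp = bp
    })
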