--- Parameter and layer classes related to linear (affine) transforms.

local MatrixParam = nerv.class('nerv.MatrixParam', 'nerv.Param')
local LinearTransParam = nerv.class('nerv.LinearTransParam', 'nerv.MatrixParam')
local BiasParam = nerv.class('nerv.BiasParam', 'nerv.MatrixParam')
local AffineLayer = nerv.class('nerv.AffineLayer', 'nerv.Layer')

--- A parameter that consists of a single matrix
-- @type nerv.MatrixParam
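
--- Apply a function to the parameter matrix (e.g. for type or size checks).
-- @param checker the function to be applied to the matrix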
function MatrixParam:check(checker)
    -- check trans matrix type
    checker(self.trans)
end

--- Read from a file handle.
-- @param handle the file handle
function MatrixParam:read(handle)
    self.trans = self.gconf.mmat_type.load(handle)
end
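
--- Write to a file handle.
-- @param handle the file handle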
function MatrixParam:write(handle)
    self.trans:save(handle)
end
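
--- Set up the update state: a zero-filled correction matrix and its
-- accumulator, created once before training starts.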
function MatrixParam:train_init()
    -- "correction" holds the momentum-smoothed update, while
    -- "correction_acc" accumulates the raw gradients of a batch
    self.correction = self.trans:create()
    self.correction_acc = self.correction:create()
    self.correction:fill(0)
    self.correction_acc:fill(0)
end
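
--- Make a copy of this parameter.
-- @param copier the function used to clone the underlying matrix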
function MatrixParam:copy(copier)
    local target = nerv.MatrixParam(self.id, self.gconf)
    target.trans = copier(self.trans)
    return target
end
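
--- Apply the accumulated correction to the matrix (internal).
-- With momentum m, an effective batch size n = batch_size / (1 - m) is used,
-- and the update performed is
--   correction <- m * correction + correction_acc
--   trans <- alpha * trans - (lrate / n) * beta * correction
-- (without momentum, correction_acc is applied directly).
-- @param alpha scaling factor for the current weights
-- @param beta scaling factor for the learning rate term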
function MatrixParam:_update(alpha, beta)
    local gconf = self.gconf
    -- momentum gain
    local mmt_gain = 1.0 / (1.0 - gconf.momentum)
    local n = gconf.batch_size * mmt_gain
    -- perform update
    if gconf.momentum > 0 then
        self.correction:add(self.correction, self.correction_acc, gconf.momentum, 1.0)
        self.trans:add(self.trans, self.correction, alpha, -gconf.lrate / n * beta)
    else
        self.trans:add(self.trans, self.correction_acc, alpha, -gconf.lrate / n * beta)
    end
    -- clear the accumulator for the next batch
    self.correction_acc:fill(0)
end
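
--- Accumulate an externally computed gradient.
-- @param gradient the gradient matrix to be added to the accumulator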
function MatrixParam:back_propagate_by_gradient(gradient)
    self.correction_acc:add(self.correction_acc, gradient, 1.0, 1.0)
end
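
--- Accumulate the gradient computed from the back-propagated error and the
-- layer input: correction_acc <- correction_acc + input^T * err
-- @param err the error signal propagated from the succeeding layer
-- @param input the input fed to the layer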
function MatrixParam:back_propagate_by_err_input(err, input)
    self.correction_acc:mul(input, err, 1.0, 1.0, 'T', 'N')
end
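
--- Apply the accumulated update without weight decay.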
function MatrixParam:update_by_gradient()
    self:_update(1.0, 1.0)
end
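
--- Apply the accumulated update with L2 weight decay: both the weights and
-- the correction term are scaled by 1 - lrate * wcost.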
function MatrixParam:update_by_err_input()
    local gconf = self.gconf
    local l2 = 1 - gconf.lrate * gconf.wcost
    self:_update(l2, l2)
end

--- A fully-connected linear transform layer. It applies a linear transform
-- to each of its inputs, sums the results and adds a bias, producing a
-- single output.
-- @type nerv.AffineLayer

--- The constructor.
-- @param id the identifier of the layer
-- @param global_conf the global configuration table
-- @param layer_conf the layer configuration table (with `dim_in` and
-- `dim_out` giving the port dimensions)
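-- A minimal construction sketch (the identifier and dimensions below are
-- illustrative, not taken from this file):
-- @usage local affine = nerv.AffineLayer("affine0", gconf,
--                                        {dim_in = {429}, dim_out = {2048}})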
function AffineLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
    self:bind_params()
end
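
--- Bind the linear transform and bias parameters to the layer (one
-- LinearTransParam per input port, plus a single BiasParam).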
function AffineLayer:bind_params()
    for i = 1, #self.dim_in do