--- Parameter and layer classes related to linear transform.

local MatrixParam = nerv.class('nerv.MatrixParam', 'nerv.Param')
local LinearTransParam = nerv.class('nerv.LinearTransParam', 'nerv.MatrixParam')
local BiasParam = nerv.class('nerv.BiasParam', 'nerv.MatrixParam')
local AffineLayer = nerv.class('nerv.AffineLayer', 'nerv.Layer')

--- A parameter that consists of a single matrix
-- @type nerv.MatrixParam

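--- Apply a checker function to the underlying matrix (e.g. to verify its type).
-- @param checker the function invoked on the matrix in `self.trans`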
function MatrixParam:check(checker)
    -- check trans matrix type
    checker(self.trans)
end

--- Read from a file handle.
-- @param handle the file handle
function MatrixParam:read(handle)
    self.trans = self.gconf.mmat_type.load(handle)
end

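--- Write to a file handle.
-- @param handle the file handle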
function MatrixParam:write(handle)
    self.trans:save(handle)
end

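--- Allocate the correction (momentum accumulation) matrix and zero it
-- before training starts.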
function MatrixParam:train_init()
    self.correction = self.trans:create()
    self.correction:fill(0)
end

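--- Make a copy of this parameter.
-- @param copier the function that takes the matrix in `self.trans` and
-- returns a copy of it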
function MatrixParam:copy(copier)
    local target = nerv.MatrixParam(self.id, self.gconf)
    target.trans = copier(self.trans)
    return target
end

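--- Perform one SGD step from a precomputed gradient, with optional momentum.
-- The step is scaled by lrate / n, where n = batch_size / (1 - momentum)
-- compensates for the gain introduced by momentum accumulation; alpha scales
-- the current weights (used by subclasses for L2 weight decay) and beta
-- scales the learning-rate term.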
function MatrixParam:_update_by_gradient(gradient, alpha, beta)
    local gconf = self.gconf
    -- momentum gain
    local mmt_gain = 1.0 / (1.0 - gconf.momentum)
    local n = gconf.batch_size * mmt_gain
    -- perform update
    if gconf.momentum > 0 then
        self.correction:add(self.correction, gradient, gconf.momentum, 1.0)
        self.trans:add(self.trans, self.correction, alpha, -gconf.lrate / n * beta)
    else
        self.trans:add(self.trans, gradient, alpha, -gconf.lrate / n * beta)
    end
end

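--- Perform one SGD step where the gradient is formed as the product of the
-- transposed layer input and the back-propagated error (input^T * err),
-- with optional momentum; alpha and beta have the same meaning as in
-- _update_by_gradient.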
function MatrixParam:_update_by_err_input(err, input, alpha, beta)
    local gconf = self.gconf
    -- momentum gain
    local mmt_gain = 1.0 / (1.0 - gconf.momentum)
    local n = gconf.batch_size * mmt_gain
    -- perform update
    if gconf.momentum > 0 then
        self.correction:mul(input, err, 1.0, gconf.momentum, 'T', 'N')
        self.trans:add(self.trans, self.correction, alpha, -gconf.lrate / n * beta)
    else
        self.trans:mul(input, err, -gconf.lrate / n * beta, alpha, 'T', 'N')
    end
end

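--- Update the parameter from a gradient, without regularization.
-- @param gradient the gradient matrix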
function MatrixParam:update_by_gradient(gradient)
    self:_update_by_gradient(gradient, 1.0, 1.0)
end

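--- Update the parameter from the back-propagated error and the layer input,
-- without regularization.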
function MatrixParam:update_by_err_input(err, input)
    self:_update_by_err_input(err, input, 1.0, 1.0)
end

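--- Update the linear transform with L2 weight decay: both the current
-- weights and the gradient step are scaled by 1 - lrate * wcost.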
function LinearTransParam:update_by_err_input(err, input)
    local gconf = self.gconf
    local l2 = 1 - gconf.lrate * gconf.wcost
    self:_update_by_err_input(err, input, l2, l2)
end

--- A fully-connected linear transform layer.
-- @type nerv.AffineLayer

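-- A minimal usage sketch, assuming a pre-built global config `gconf` (with
-- lrate, momentum, wcost, batch_size and mmat_type set) and that classes
-- created with nerv.class are instantiated by calling them; layer_conf
-- fields other than dim_in/dim_out are not shown because this file does not
-- read any others directly.
--
--   local affine = nerv.AffineLayer("affine0", gconf,
--                                   {dim_in = {429}, dim_out = {2048}})
--   affine:init(gconf.batch_size)
--   affine:propagate(input, output)   -- input/output are tables of matrices
--   affine:back_propagate(bp_err, next_bp_err, input, output)
--   affine:update(bp_err, input, output)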
--- The constructor.
function AffineLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
    self:bind_params()
end

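--- Look up (or create) the layer's parameters: one LinearTransParam per
-- input ("ltp1", "ltp2", ...; "ltp" is also accepted as an id for the first
-- one) and a single BiasParam "bp".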
function AffineLayer:bind_params()
    for i = 1, #self.dim_in do
        local pid = "ltp" .. i
        local pid_list = i == 1 and {pid, "ltp"} or pid
        self["ltp" .. i] = self:find_param(pid_list, self.lconf, self.gconf,
                                            nerv.LinearTransParam,
                                            {self.dim_in[i], self.dim_out[1]})
    end
    self.ltp = self.ltp1 -- alias of ltp1
    self.bp = self:find_param("bp", self.lconf, self.gconf,
                                nerv.BiasParam,
                                {1, self.dim_out[1]})
end

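--- Validate the parameter dimensions against dim_in/dim_out and initialize
-- the training state of each parameter.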
function AffineLayer:init(batch_size)
    if self.ltp.trans:ncol() ~= self.bp.trans:ncol() then
        nerv.error("mismatching dimensions of linear transform and bias parameter")
    end
    for i = 1, #self.dim_in do
        if self.dim_in[i] ~= self["ltp" .. i].trans:nrow() then
            nerv.error("mismatching dimensions of linear transform parameter and input")
        end
        if self.dim_out[1] ~= self["ltp" .. i].trans:ncol() then
            nerv.error("mismatching dimensions of linear transform parameter and output")
        end
        self["ltp" .. i]:train_init()
    end
    self.bp:train_init()
end

function AffineLayer:batch_resize(batch_size)
    -- do nothing
end

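--- Update the parameters: each linear transform from its own input slice and
-- the shared error, and the bias from the column sums of the error.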
function AffineLayer:update(bp_err, input, output)
    for i = 1, #self.dim_in do
        self["ltp" .. i]:update_by_err_input(bp_err[1], input[i])
    end
    self.bp:update_by_gradient(bp_err[1]:colsum())
end

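--- Compute the affine transform: output[1] = sum_i input[i] * ltp_i + bp,
-- where the bias row is added to every row of the output.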
function AffineLayer:propagate(input, output)
    -- apply linear transform
    output[1]:mul(input[1], self.ltp1.trans, 1.0, 0.0, 'N', 'N')
    for i = 2, #self.dim_in do
        output[1]:mul(input[i], self["ltp" .. i].trans, 1.0, 1.0, 'N', 'N')
    end
    -- add bias
    output[1]:add_row(self.bp.trans, 1.0)
end

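--- Propagate the error back to each input: next_bp_err[i] = bp_err[1] * ltp_i^T.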
function AffineLayer:back_propagate(bp_err, next_bp_err, input, output)
    for i = 1, #self.dim_in do
        next_bp_err[i]:mul(bp_err[1], self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T')
    end
end

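--- Collect every parameter of this layer into a ParamRepo.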
function AffineLayer:get_params()
    local pr = nerv.ParamRepo({self.ltp1, self.bp}, self.loc_type)
    for i = 2, #self.dim_in do
        pr:add(self["ltp" .. i].id, self["ltp" .. i])
    end
    return pr
end