-- nerv/layer/init.lua
-- The following methods must be implemented for a layer to work properly

local Param = nerv.class('nerv.Param')

function Param:__init(id, global_conf)
    self.id = id
    self.gconf = global_conf
end

function Param:get_info()
    return self.info
end

function Param:set_info(info)
    self.info = info
end

function Param:read(handle)
    nerv.error_method_not_implemented()
end

function Param:write(handle)
    nerv.error_method_not_implemented()
end

function Param:update(gradient)
    nerv.error_method_not_implemented()
end
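
--[[ A minimal sketch of a concrete Param subclass. The I/O calls follow
     the pattern of NERV's MatrixParam (see affine.lua), and the matrix
     method add(a, b, alpha, beta) (self = alpha * a + beta * b) is an
     assumption about the matrix API, not something defined in this file.

local MyParam = nerv.class('nerv.MyParam', 'nerv.Param')

function MyParam:read(handle)
    -- load the matrix stored in the chunk file
    self.trans = self.gconf.mmat_type.load(handle)
end

function MyParam:write(handle)
    self.trans:save(handle)
end

function MyParam:update(gradient)
    -- plain SGD step: trans <- trans - lrate * gradient
    self.trans:add(self.trans, gradient, 1.0, -self.gconf.lrate)
end
]]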

local Layer = nerv.class('nerv.Layer')

function Layer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.lconf = layer_conf
    -- select the matrix backend: host (CPU) matrices or device (CUDA) matrices
    if self.gconf.use_cpu then
        self.mat_type = self.gconf.mmat_type
        self.loc_type = nerv.ParamRepo.LOC_TYPES.ON_HOST
    else
        self.mat_type = self.gconf.cumat_type
        self.loc_type = nerv.ParamRepo.LOC_TYPES.ON_DEVICE
    end
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
end
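
--[[ A layer is constructed with an id, the global configuration table, and
     a layer-specific configuration; dim_in/dim_out are lists because a
     layer may have multiple input/output ports. A usage sketch (the gconf
     contents are assumed to be set up by the surrounding script):

local sigmoid = nerv.SigmoidLayer('sigmoid1', gconf,
                                  {dim_in = {429}, dim_out = {429}})
]]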

function Layer:init(batch_size)
    nerv.error_method_not_implemented()
end

function Layer:update(bp_err, input, output)
    nerv.error_method_not_implemented()
end

function Layer:propagate(input, output)
    nerv.error_method_not_implemented()
end

function Layer:back_propagate(bp_err, next_bp_err, input, output)
    nerv.error_method_not_implemented()
end
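
--[[ A minimal sketch of a concrete layer implementing the interface above.
     nerv.ScaleLayer is hypothetical (not part of NERV); it multiplies its
     single input by a constant. The matrix method add(a, b, alpha, beta)
     (self = alpha * a + beta * b) is assumed from the NERV matrix API.

local ScaleLayer = nerv.class('nerv.ScaleLayer', 'nerv.Layer')

function ScaleLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self.scale = layer_conf.scale or 1.0
    self:check_dim_len(1, 1) -- exactly one input port and one output port
end

function ScaleLayer:init(batch_size)
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("input and output dimension mismatch")
    end
end

function ScaleLayer:bind_params()
    -- no trainable parameters to bind
end

function ScaleLayer:propagate(input, output)
    output[1]:add(input[1], input[1], self.scale, 0.0) -- output = scale * input
end

function ScaleLayer:back_propagate(bp_err, next_bp_err, input, output)
    -- the map is linear, so the error is passed back with the same scale
    next_bp_err[1]:add(bp_err[1], bp_err[1], self.scale, 0.0)
end

function ScaleLayer:update(bp_err, input, output)
    -- nothing to update
end

function ScaleLayer:get_params()
    return nerv.ParamRepo({}, self.loc_type)
end
]]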

-- Verify that the number of input/output ports given in the layer config
-- matches what the layer type expects; a non-positive expected length
-- disables the check (the layer accepts any number of ports).
function Layer:check_dim_len(len_in, len_out)
    local given_in = #self.dim_in
    local given_out = #self.dim_out
    if len_in > 0 and given_in ~= len_in then
        nerv.error("layer %s expects %d inputs, %d given",
                    self.id, len_in, given_in)
    end
    if len_out > 0 and given_out ~= len_out then
        nerv.error("layer %s expects %d outputs, %d given",
                    self.id, len_out, given_out)
    end
end

function Layer:get_params()
    nerv.error_method_not_implemented()
end

function Layer:bind_params()
    nerv.error_method_not_implemented()
end

function Layer:get_dim()
    return self.dim_in, self.dim_out
end

function Layer:set_attr(name, value)
    self[name] = value
end

function Layer:get_sublayer(id)
    nerv.error('primitive layer does not have sublayers')
end

function Layer:find_param(plist, lconf, gconf, p_type, p_dim)
    if type(plist) == "string" then
        plist = {plist}
    end
    if lconf.params == nil then
        lconf.params = {}
    end
    local plist_str = table.tostring(plist)
    for i, pname in ipairs(plist) do
        -- an id explicitly given in `layer_conf.params` takes precedence
        local pid = lconf.params[pname]
        if pid ~= nil then
            nerv.info("param id for [%s] of layer [%s] specified in `layer_conf.params`.", pname, self.id)
            if lconf.pr:has_param(pid) then
                return lconf.pr:get_param(pid)
            end
        end
    end
    -- otherwise, try the conventional id "<layer_id>_<param_name>"
    local pid = self.id .. '_' .. plist[1]
    if lconf.pr:has_param(pid) then
        nerv.info("param id for [%s] of layer [%s] is generated automatically.", plist[1], self.id)
        return lconf.pr:get_param(pid)
    end
    nerv.info("param id for [%s] of layer [%s] is not found in the specified param repo, " ..
                "switching to auto-generation", plist_str, self.id)
    local p = p_type(pid, gconf)
    p.trans = self.mat_type(unpack(p_dim))
    if type(gconf.param_random) ~= "function" then
        nerv.error("a param-generating function (gconf.param_random) is needed")
    end
    p.trans:generate(gconf.param_random)
    return p
end
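
--[[ A sketch of a typical bind_params() built on find_param(), modeled on
     NERV's affine layer (affine.lua); the exact code there may differ.
     find_param() accepts a single name or a list of candidate names and
     falls back to auto-generation when no matching id is found.

function AffineLayer:bind_params()
    self.ltp = self:find_param("ltp", self.lconf, self.gconf,
                               nerv.LinearTransParam,
                               {self.dim_in[1], self.dim_out[1]})
    self.bp = self:find_param("bp", self.lconf, self.gconf,
                              nerv.BiasParam,
                              {1, self.dim_out[1]})
end
]]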

nerv.include('graph.lua')
nerv.include('affine.lua')
nerv.include('sigmoid.lua')
nerv.include('tanh.lua')
nerv.include('softmax_ce.lua')
nerv.include('bias.lua')
nerv.include('window.lua')
nerv.include('mse.lua')
nerv.include('combiner.lua')
nerv.include('softmax.lua')
nerv.include('elem_mul.lua')
nerv.include('lstm.lua')
nerv.include('lstm_gate.lua')
nerv.include('dropout.lua')
nerv.include('gru.lua')
nerv.include('rnn.lua')
nerv.include('duplicate.lua')
nerv.include('identity.lua')

-- The following lines are for backward compatibility and will be removed in
-- the future. The use of these names is deprecated.
nerv.DropoutLayerT = nerv.DropoutLayer
nerv.GRULayerT = nerv.GRULayer
nerv.LSTMLayerT = nerv.LSTMLayer
nerv.SoftmaxCELayerT = nerv.SoftmaxCELayer