-- The following methods must be implemented to let a layer work properly

local Param = nerv.class('nerv.Param')

function Param:__init(id, global_conf)
    self.id = id
    self.gconf = global_conf
end

function Param:get_info()
    return self.info
end

function Param:set_info(info)
    self.info = info
end

function Param:read(handle)
    nerv.error_method_not_implemented()
end

function Param:write(handle)
    nerv.error_method_not_implemented()
end

function Param:update(gradient)
    nerv.error_method_not_implemented()
end

local Layer = nerv.class('nerv.Layer')

function Layer:__init(id, global_conf, layer_conf)
    nerv.error_method_not_implemented()
end

function Layer:init(batch_size)
    nerv.error_method_not_implemented()
end

function Layer:update(bp_err, input, output)
    nerv.error_method_not_implemented()
end

function Layer:propagate(input, output)
    nerv.error_method_not_implemented()
end

function Layer:back_propagate(bp_err, next_bp_err, input, output)
    nerv.error_method_not_implemented()
end

function Layer:check_dim_len(len_in, len_out)
    local expected_in = #self.dim_in
    local expected_out = #self.dim_out
    if len_in > 0 and expected_in ~= len_in then
        nerv.error("layer %s expects %d inputs, %d given",
                    self.id, len_in, expected_in)
    end
    if len_out > 0 and expected_out ~= len_out then
        nerv.error("layer %s expects %d outputs, %d given",
                    self.id, len_out, expected_out)
    end
end

function Layer:get_params()
    nerv.error_method_not_implemented()
end

function Layer:get_dim()
    return self.dim_in, self.dim_out
end

function Layer:find_param(pid_list, lconf, gconf, p_type, p_dim)
    if type(pid_list) == "string" then
        pid_list = {pid_list}
    end
    local pid_list_str = table.tostring(pid_list)
    local pid_g
    local p
    for i, pid in ipairs(pid_list) do
        -- first, look for the parameter directly in the layer config
        if lconf[pid] ~= nil then
            nerv.info("param [%s] of layer [%s] found in `layer_conf`.",
                        pid, self.id)
            return lconf[pid]
        end
        pid_g = self.id .. '_' .. pid -- global identifier
        local pr = lconf.pr
        -- then, look it up in the param repo attached to the layer config
        if pr ~= nil and pr:has_param(pid_g) then
            nerv.info("param [%s] of layer [%s] found in `layer_conf.pr`.",
                        pid_list_str, self.id)
            p = pr:get_param(pid_g)
            return p
        end
    end
    -- not found anywhere: auto-generate a randomly initialized parameter
    nerv.info("param [%s] of layer [%s] is not found in `layer_conf` or " ..
                "`layer_conf.pr`, switch to auto-generate.",
                pid_list_str, self.id)
    p = p_type(pid_g, gconf)
    p.trans = gconf.cumat_type(unpack(p_dim))
    if type(gconf.param_random) ~= "function" then
        nerv.error("a param generate function is needed")
    end
    p.trans:generate(gconf.param_random)
    return p
end

nerv.include('affine.lua')
nerv.include('sigmoid.lua')
nerv.include('tanh.lua')
nerv.include('softmax_ce.lua')
nerv.include('bias.lua')
nerv.include('window.lua')
nerv.include('mse.lua')
nerv.include('combiner.lua')
nerv.include('affine_recurrent.lua')
nerv.include('softmax.lua')
nerv.include('elem_mul.lua')
nerv.include('gate_fff.lua')
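
-- Below is a minimal sketch (kept inside a block comment so it is not
-- executed or registered) of how a concrete layer might implement the
-- abstract interface declared above. The class name `nerv.ExampleLayer`,
-- the single `ltp` parameter, and the `mul` call signatures are
-- illustrative assumptions loosely modeled on the affine layer; see
-- `affine.lua` for an actual implementation.
--[[
local ExampleLayer = nerv.class("nerv.ExampleLayer", "nerv.Layer")

function ExampleLayer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1) -- exactly one input and one output
    -- reuse a param from `layer_conf`/`layer_conf.pr`, or auto-generate one
    self.ltp = self:find_param("ltp", layer_conf, global_conf,
                               nerv.LinearTransParam,
                               {self.dim_in[1], self.dim_out[1]})
end

function ExampleLayer:init(batch_size)
    -- buffer for the parameter gradient, reused across mini-batches
    self.ltp_grad = self.gconf.cumat_type(self.dim_in[1], self.dim_out[1])
end

function ExampleLayer:propagate(input, output)
    -- output = input * ltp
    output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
end

function ExampleLayer:back_propagate(bp_err, next_bp_err, input, output)
    -- propagate the error back through the linear transform
    next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
end

function ExampleLayer:update(bp_err, input, output)
    -- gradient of the linear transform: input^T * bp_err
    self.ltp_grad:mul(input[1], bp_err[1], 1.0, 0.0, 'T', 'N')
    self.ltp:update(self.ltp_grad) -- the Param:update() hook declared above
end

function ExampleLayer:get_params()
    return nerv.ParamRepo({self.ltp})
end
--]]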