-- The abstract base class for time-aware (chunked/recurrent) layers.
-- Concrete layers must override the methods below; the base versions
-- simply raise "method not implemented".
local LayerT = nerv.class('nerv.LayerT')

function LayerT:__init(id, global_conf, layer_conf)
    nerv.error_method_not_implemented()
end

-- Allocate buffers for the given mini-batch and chunk (time-step) sizes.
function LayerT:init(batch_size, chunk_size)
    nerv.error_method_not_implemented()
end

-- Update the layer parameters at time step t using the back-propagated error.
function LayerT:update(bp_err, input, output, t)
    nerv.error_method_not_implemented()
end

-- Forward computation at time step t.
function LayerT:propagate(input, output, t)
    nerv.error_method_not_implemented()
end

-- Backward computation at time step t: derive next_bp_err from bp_err.
function LayerT:back_propagate(bp_err, next_bp_err, input, output, t)
    nerv.error_method_not_implemented()
end

-- Check that the port counts declared in the layer configuration match
-- what the layer expects; a non-positive expected count disables the
-- corresponding check.
function LayerT:check_dim_len(len_in, len_out)
    local expected_in = #self.dim_in
    local expected_out = #self.dim_out
    if len_in > 0 and expected_in ~= len_in then
        nerv.error("layer %s expects %d inputs, %d given",
                    self.id, len_in, expected_in)
    end
    if len_out > 0 and expected_out ~= len_out then
        nerv.error("layer %s expects %d outputs, %d given",
                    self.id, len_out, expected_out)
    end
end

-- Parameter lookup is shared with the ordinary (non-time-aware) layer class.
LayerT.find_param = nerv.Layer.find_param

function LayerT:get_params()
    nerv.error_method_not_implemented()
end

function LayerT:get_dim()
    return self.dim_in, self.dim_out
end

nerv.include('sutil.lua')
nerv.include('tnn.lua')
nerv.include('layersT/softmax_ce_t.lua')
nerv.include('layersT/lstm_t.lua')
nerv.include('layersT/dropout_t.lua')
nerv.include('layer_dag_t.lua')
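
-- ---------------------------------------------------------------------------
-- Illustrative sketch only, not part of the original file: a minimal
-- pass-through layer showing how a concrete subclass fills in the LayerT
-- interface above, following the pattern of the layersT/ files included
-- above. The class name 'nerv.IdentityLayerT' is hypothetical, and the
-- copy_fromd call assumes the NERV matrix device-to-device copy API.
-- ---------------------------------------------------------------------------
local IdentityLayerT = nerv.class('nerv.IdentityLayerT', 'nerv.LayerT')

function IdentityLayerT:__init(id, global_conf, layer_conf)
    self.id = id
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self.gconf = global_conf
    self:check_dim_len(1, 1) -- exactly one input port and one output port
end

function IdentityLayerT:init(batch_size, chunk_size)
    -- stateless and parameter-free: nothing to allocate
end

function IdentityLayerT:propagate(input, output, t)
    -- forward at time step t: copy the input port to the output port
    output[1]:copy_fromd(input[1])
end

function IdentityLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
    -- backward at time step t: pass the error through unchanged
    next_bp_err[1]:copy_fromd(bp_err[1])
end

function IdentityLayerT:update(bp_err, input, output, t)
    -- no parameters to update
end

function IdentityLayerT:get_params()
    return nerv.ParamRepo({}) -- empty repo: this layer owns no parameters
end
-- Note the time index t threaded through propagate/back_propagate/update:
-- it lets a TNN drive one layer instance across all steps of a chunk while
-- the layer keeps per-step buffers, as lstm_t.lua and softmax_ce_t.lua do.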