Diffstat (limited to 'nerv/tnn/init.lua')
-rw-r--r-- | nerv/tnn/init.lua | 47 |
1 files changed, 0 insertions, 47 deletions
diff --git a/nerv/tnn/init.lua b/nerv/tnn/init.lua
deleted file mode 100644
index 44ce26b..0000000
--- a/nerv/tnn/init.lua
+++ /dev/null
@@ -1,47 +0,0 @@
-local LayerT = nerv.class('nerv.LayerT')
-
-function LayerT:__init(id, global_conf, layer_conf)
-    nerv.error_method_not_implemented()
-end
-
-function LayerT:init(batch_size, chunk_size)
-    nerv.error_method_not_implemented()
-end
-
-function LayerT:update(bp_err, input, output, t)
-    nerv.error_method_not_implemented()
-end
-
-function LayerT:propagate(input, output, t)
-    nerv.error_method_not_implemented()
-end
-
-function LayerT:back_propagate(bp_err, next_bp_err, input, output, t)
-    nerv.error_method_not_implemented()
-end
-
-function LayerT:check_dim_len(len_in, len_out)
-    local expected_in = #self.dim_in
-    local expected_out = #self.dim_out
-    if len_in > 0 and expected_in ~= len_in then
-        nerv.error("layer %s expects %d inputs, %d given",
-                   self.id, len_in, expected_in)
-    end
-    if len_out > 0 and expected_out ~= len_out then
-        nerv.error("layer %s expects %d outputs, %d given",
-                   self.id, len_out, expected_out)
-    end
-end
-
-LayerT.find_param = nerv.Layer.find_param
-
-function LayerT:get_params()
-    nerv.error_method_not_implemented()
-end
-
-function LayerT:get_dim()
-    return self.dim_in, self.dim_out
-end
-
-nerv.include('sutil.lua')
-nerv.include('tnn.lua')
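For context, the removed nerv.LayerT class defined the abstract interface that timed (per-chunk) layers were expected to implement: __init, init, update, propagate, back_propagate, check_dim_len, get_params, and get_dim, with t carrying the time step. Below is a minimal sketch of what a concrete implementation of that interface could look like; the class name nerv.DummyLayerT, its pass-through behaviour, and the matrix copy call are hypothetical illustrations and are not part of this repository.

-- Hypothetical example, not part of the repository: a pass-through layer
-- implementing the nerv.LayerT interface removed above.
local DummyLayerT = nerv.class('nerv.DummyLayerT', 'nerv.LayerT')

function DummyLayerT:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1)    -- exactly one input and one output
end

function DummyLayerT:init(batch_size, chunk_size)
    -- no parameters to allocate for a pass-through layer
end

function DummyLayerT:update(bp_err, input, output, t)
    -- no parameters to update
end

function DummyLayerT:propagate(input, output, t)
    -- copy the input of time step t to the output unchanged
    -- (copy_fromd as the device-to-device copy is an assumption here)
    output[1]:copy_fromd(input[1])
end

function DummyLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
    -- pass the error signal back unchanged
    next_bp_err[1]:copy_fromd(bp_err[1])
end

function DummyLayerT:get_params()
    return nerv.ParamRepo({})   -- no trainable parameters
end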