From 996472e76c31ba560622841b4b31318244317c84 Mon Sep 17 00:00:00 2001
From: txh18
Date: Sun, 20 Dec 2015 22:08:54 +0800
Subject: small mistake in layersT

---
 nerv/tnn/init.lua | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nerv/tnn/init.lua b/nerv/tnn/init.lua
index 4bbff12..b375fa8 100644
--- a/nerv/tnn/init.lua
+++ b/nerv/tnn/init.lua
@@ -33,7 +33,7 @@ function LayerT:check_dim_len(len_in, len_out)
     end
 end
 
-layerT.find_param = nerv.layer.find_param
+LayerT.find_param = nerv.Layer.find_param
 
 function LayerT:get_params()
     nerv.error_method_not_implemented()
-- 
cgit v1.2.3
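
Context note (not part of the patch): a minimal Lua sketch of why the casing fix matters. `Layer` and `LayerT` below are local stand-in tables, not the real `nerv.Layer` and `nerv.LayerT` classes, and `find_param` here is a placeholder rather than the toolkit's actual signature.

-- Stand-in base class table (placeholder for nerv.Layer).
local Layer = {}
function Layer.find_param(name)            -- placeholder, not the real API
    return "param:" .. name
end

-- Stand-in derived class table (placeholder for nerv.LayerT).
local LayerT = setmetatable({}, {__index = Layer})

-- The removed line used lowercase names:
--   layerT.find_param = nerv.layer.find_param
-- Neither `layerT` nor `nerv.layer` names the class tables, so the alias is
-- never attached to LayerT; in a plain Lua setup like this one the assignment
-- would fail with "attempt to index a nil value" as soon as the file loads.

-- The added line aliases the base method onto the correctly-cased class table:
LayerT.find_param = Layer.find_param

print(LayerT.find_param("ltp"))            -- prints "param:ltp"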