author     txh18 <[email protected]>    2015-12-20 22:08:54 +0800
committer  txh18 <[email protected]>    2015-12-20 22:08:54 +0800
commit     996472e76c31ba560622841b4b31318244317c84 (patch)
tree       6e1264a4ddd9a95a1e04ab9f0d862647813e23be
parent     2be64c382aa8d2fedd6aaf69dff212e7afef22b5 (diff)
small mistake in LayerT
-rw-r--r--  nerv/tnn/init.lua | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/nerv/tnn/init.lua b/nerv/tnn/init.lua
index 4bbff12..b375fa8 100644
--- a/nerv/tnn/init.lua
+++ b/nerv/tnn/init.lua
@@ -33,7 +33,7 @@ function LayerT:check_dim_len(len_in, len_out)
     end
 end
 
-layerT.find_param = nerv.layer.find_param
+LayerT.find_param = nerv.Layer.find_param
 
 function LayerT:get_params()
     nerv.error_method_not_implemented()
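
For readers skimming the one-line change: the fix is purely a capitalization issue, since Lua table and field names are case-sensitive. Below is a minimal, hypothetical Lua sketch; the stand-in tables `nerv`, `nerv.Layer`, and `LayerT` only mimic the names in the diff, not the actual nerv class machinery. It shows why the lowercase spelling fails and what the corrected assignment does.

-- Minimal sketch with hypothetical stand-in tables (not the real nerv sources):
-- Lua field lookups are case-sensitive, so `nerv.layer` and `layerT` name
-- different (and here nonexistent) tables than `nerv.Layer` and `LayerT`.

local nerv = {}

nerv.Layer = {}                          -- base class table (stand-in)
function nerv.Layer.find_param(pid)      -- shared helper to be reused
    return "param: " .. pid
end

local LayerT = {}                        -- temporal-layer class table (stand-in)

-- Wrong (the old line): `nerv.layer` is nil, so indexing it raises
-- "attempt to index a nil value"; `layerT` would likewise not be the class.
-- layerT.find_param = nerv.layer.find_param

-- Right (the fixed line): borrow the method from the correctly-cased table.
LayerT.find_param = nerv.Layer.find_param

print(LayerT.find_param("ltp"))          -- -> param: ltp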