about summary refs log tree commit diff
path: root/layer/init.lua
diff options
context:
space:
mode:
Diffstat (limited to 'layer/init.lua')
-rw-r--r--  layer/init.lua  24
1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/layer/init.lua b/layer/init.lua
index c8c691b..38bcd7f 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -2,50 +2,50 @@
local Param = nerv.class('nerv.Param')
-function nerv.Param:__init(id, global_conf)
+function Param:__init(id, global_conf)
self.id = id
self.gconf = global_conf
end
-function nerv.Param:get_info()
+function Param:get_info()
return self.info
end
-function nerv.Param:set_info(info)
+function Param:set_info(info)
self.info = info
end
-function nerv.Param:read(pfhandle)
+function Param:read(pfhandle)
nerv.error_method_not_implemented()
end
-function nerv.Param:write(pfhandle)
+function Param:write(pfhandle)
nerv.error_method_not_implemented()
end
local Layer = nerv.class('nerv.Layer')
-function nerv.Layer:__init(id, global_conf, ...)
+function Layer:__init(id, global_conf, layer_conf)
nerv.error_method_not_implemented()
end
-function nerv.Layer:init(id)
+function Layer:init(id)
nerv.error_method_not_implemented()
end
-function nerv.Layer:update(bp_err, input, output)
+function Layer:update(bp_err, input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:propagate(input, output)
+function Layer:propagate(input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:back_propagate(next_bp_err, bp_err, input, output)
+function Layer:back_propagate(next_bp_err, bp_err, input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:check_dim_len(len_in, len_out)
+function Layer:check_dim_len(len_in, len_out)
local expected_in = #self.dim_in
local expected_out = #self.dim_out
if len_in > 0 and expected_in ~= len_in then
@@ -58,7 +58,7 @@ function nerv.Layer:check_dim_len(len_in, len_out)
end
end
-function nerv.Layer:get_dim()
+function Layer:get_dim()
return self.dim_in, self.dim_out
end