author     Determinant <ted.sybil@gmail.com>  2015-06-03 23:00:45 +0800
committer  Determinant <ted.sybil@gmail.com>  2015-06-03 23:00:45 +0800
commit     ea6f2990f99dd9ded6a0e74d75a3ec84900a2518 (patch)
tree       03b4ea34fa373189bf6b2b017bf54793d5c89f8e /layer/init.lua
parent     bb56a806e0636a0b20117b1644701d63e2bfaefb (diff)
demo now works (without random shuffle)
Diffstat (limited to 'layer/init.lua')
-rw-r--r--  layer/init.lua | 24
1 file changed, 12 insertions, 12 deletions
diff --git a/layer/init.lua b/layer/init.lua
index c8c691b..38bcd7f 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -2,50 +2,50 @@
local Param = nerv.class('nerv.Param')
-function nerv.Param:__init(id, global_conf)
+function Param:__init(id, global_conf)
self.id = id
self.gconf = global_conf
end
-function nerv.Param:get_info()
+function Param:get_info()
return self.info
end
-function nerv.Param:set_info(info)
+function Param:set_info(info)
self.info = info
end
-function nerv.Param:read(pfhandle)
+function Param:read(pfhandle)
nerv.error_method_not_implemented()
end
-function nerv.Param:write(pfhandle)
+function Param:write(pfhandle)
nerv.error_method_not_implemented()
end
local Layer = nerv.class('nerv.Layer')
-function nerv.Layer:__init(id, global_conf, ...)
+function Layer:__init(id, global_conf, layer_conf)
nerv.error_method_not_implemented()
end
-function nerv.Layer:init(id)
+function Layer:init(id)
nerv.error_method_not_implemented()
end
-function nerv.Layer:update(bp_err, input, output)
+function Layer:update(bp_err, input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:propagate(input, output)
+function Layer:propagate(input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:back_propagate(next_bp_err, bp_err, input, output)
+function Layer:back_propagate(next_bp_err, bp_err, input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:check_dim_len(len_in, len_out)
+function Layer:check_dim_len(len_in, len_out)
local expected_in = #self.dim_in
local expected_out = #self.dim_out
if len_in > 0 and expected_in ~= len_in then
@@ -58,7 +58,7 @@ function nerv.Layer:check_dim_len(len_in, len_out)
end
end
-function nerv.Layer:get_dim()
+function Layer:get_dim()
return self.dim_in, self.dim_out
end
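
The change above replaces the fully-qualified names nerv.Param and nerv.Layer in the method definitions with the local Param and Layer references returned by nerv.class, and gives Layer:__init an explicit layer_conf argument instead of the variadic "...". As a rough illustration of the interface these abstract methods define, below is a minimal sketch (not part of this commit) of how a concrete layer could subclass nerv.Layer; the class name, the layer_conf fields, and the copy_from call are hypothetical placeholders.

local IdentityExample = nerv.class('nerv.IdentityExample', 'nerv.Layer')

function IdentityExample:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    -- dim_in/dim_out are assumed to be lists of port dimensions in layer_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1)   -- expect exactly one input and one output port
end

function IdentityExample:init()
    -- nothing to initialise: this toy layer has no parameters
end

function IdentityExample:propagate(input, output)
    -- forward pass: copy the single input matrix to the single output;
    -- copy_from stands in for whatever matrix copy routine nerv provides
    output[1]:copy_from(input[1])
end

function IdentityExample:back_propagate(next_bp_err, bp_err, input, output)
    -- backward pass: pass the error signal through unchanged
    next_bp_err[1]:copy_from(bp_err[1])
end

function IdentityExample:update(bp_err, input, output)
    -- no trainable parameters, so there is nothing to update
end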