aboutsummaryrefslogtreecommitdiff
path: root/layer/init.lua
diff options
context:
space:
mode:
authorDeterminant <ted.sybil@gmail.com>2015-06-20 20:00:25 +0800
committerDeterminant <ted.sybil@gmail.com>2015-06-20 20:00:25 +0800
commitf3f4e74eb4dbb8829e5ee136ba4b0c0a7938b551 (patch)
tree8beb12182020267ce32904d646ad0c736c27dcd2 /layer/init.lua
parent2ab9610a4fff798c1668cdc041515256fa813865 (diff)
change concept of ParamRepo; provide generalized param update; code clean-up; #25 #26 #27 #29
Diffstat (limited to 'layer/init.lua')
-rw-r--r--layer/init.lua12
1 file changed, 8 insertions, 4 deletions
diff --git a/layer/init.lua b/layer/init.lua
index 169427d..e39af94 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -15,11 +15,15 @@ function Param:set_info(info)
self.info = info
end
-function Param:read(pfhandle)
+function Param:read(handle)
nerv.error_method_not_implemented()
end
-function Param:write(pfhandle)
+function Param:write(handle)
+ nerv.error_method_not_implemented()
+end
+
+function Param:update(gradient)
nerv.error_method_not_implemented()
end
@@ -29,7 +33,7 @@ function Layer:__init(id, global_conf, layer_conf)
nerv.error_method_not_implemented()
end
-function Layer:init()
+function Layer:init(batch_size)
nerv.error_method_not_implemented()
end
@@ -41,7 +45,7 @@ function Layer:propagate(input, output)
nerv.error_method_not_implemented()
end
-function Layer:back_propagate(next_bp_err, bp_err, input, output)
+function Layer:back_propagate(bp_err, next_bp_err, input, output)
nerv.error_method_not_implemented()
end