-- The following methods must be implemented to let a layer work properly

--- Base class for persistent parameters (e.g. weights, biases).
-- NOTE(review): `nerv.class` is a project helper; presumably it registers
-- the new class table at `nerv.Param` and returns it -- confirm against
-- the nerv core sources.
local Param = nerv.class('nerv.Param')

--- Construct a parameter with an identifier.
-- @param id identifier used to look the parameter up in a parameter file
function nerv.Param:__init(id)
    self.id = id
end

--- Return the metadata previously attached with `set_info`.
function nerv.Param:get_info()
    return self.info
end

--- Attach metadata to this parameter.
-- @param info opaque metadata record (shape, type, ...) -- schema is
-- defined by callers, not visible here
function nerv.Param:set_info(info)
    self.info = info
end

--- Deserialize the parameter from a parameter-file handle.
-- Abstract: concrete parameter classes must override this.
function nerv.Param:read(pfhandle)
    nerv.error_method_not_implemented()
end

--- Serialize the parameter to a parameter-file handle.
-- Abstract: concrete parameter classes must override this.
function nerv.Param:write(pfhandle)
    nerv.error_method_not_implemented()
end

--- Abstract base class for network layers.
local Layer = nerv.class('nerv.Layer')

--- Construct a layer from a parameter record.
-- BUG FIX: was spelled `_init` (single underscore) while the sibling class
-- in this same file uses the double-underscore constructor convention
-- (`nerv.Param:__init`), so the single-underscore stub would never be
-- dispatched as the constructor.
function nerv.Layer:__init(param)
    nerv.error_method_not_implemented()
end

--- Update the layer's parameters from back-propagated errors.
-- Abstract: concrete layers must override this.
function nerv.Layer:update(bp_err, input, output)
    nerv.error_method_not_implemented()
end

--- Forward pass: compute `output` from `input`.
-- Abstract: concrete layers must override this.
function nerv.Layer:propagate(input, output)
    nerv.error_method_not_implemented()
end

--- Backward pass: compute `next_bp_err` to pass upstream from `bp_err`.
-- Abstract: concrete layers must override this.
function nerv.Layer:back_propagate(next_bp_err, bp_err, input, output)
    nerv.error_method_not_implemented()
end