From 81bf2d653902860c5d28ccade19ac6e1fd56acaf Mon Sep 17 00:00:00 2001
From: Determinant
Date: Tue, 26 May 2015 14:06:52 +0800
Subject: add layer and param

---
 layer/affine.lua | 11 +++++++++++
 layer/init.lua   | 41 +++++++++++++++++++++++++++++++++++++++++
 2 files changed, 52 insertions(+)
 create mode 100644 layer/affine.lua
 create mode 100644 layer/init.lua

(limited to 'layer')

diff --git a/layer/affine.lua b/layer/affine.lua
new file mode 100644
index 0000000..d5c50fc
--- /dev/null
+++ b/layer/affine.lua
@@ -0,0 +1,11 @@
+local LinearTransParam = nerv.class('nerv.LinearTransParam', 'nerv.Param')
+local BiasParam = nerv.class('nerv.BiasParam', 'nerv.LinearTransParam')
+local AffineLayer = nerv.class('nerv.AffineLayer', 'nerv.Layer')
+
+function LinearTransParam:read(pcdata)
+    self.trans = nerv.CuMatrixFloat.new_from_host(nerv.MMatrixFloat.load(pcdata))
+end
+
+function LinearTransParam:write(pfhandle)
+    self.trans:new_to_host():save(pfhandle)
+end
diff --git a/layer/init.lua b/layer/init.lua
new file mode 100644
index 0000000..c57a405
--- /dev/null
+++ b/layer/init.lua
@@ -0,0 +1,41 @@
+-- The following methods must be implemented to let a layer work properly
+
+local Param = nerv.class('nerv.Param')
+
+function nerv.Param:__init(id)
+    self.id = id
+end
+
+function nerv.Param:get_info()
+    return self.info
+end
+
+function nerv.Param:set_info(info)
+    self.info = info
+end
+
+function nerv.Param:read(pfhandle)
+    nerv.error_method_not_implemented()
+end
+
+function nerv.Param:write(pfhandle)
+    nerv.error_method_not_implemented()
+end
+
+local Layer = nerv.class('nerv.Layer')
+
+function nerv.Layer:_init(param)
+    nerv.error_method_not_implemented()
+end
+
+function nerv.Layer:update(bp_err, input, output)
+    nerv.error_method_not_implemented()
+end
+
+function nerv.Layer:propagate(input, output)
+    nerv.error_method_not_implemented()
+end
+
+function nerv.Layer:back_propagate(next_bp_err, bp_err, input, output)
+    nerv.error_method_not_implemented()
+end
--
cgit v1.2.3