From a32195e3e2ae9ca0f0c7a82e73e6bddb64568c05 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Thu, 10 Mar 2016 13:40:11 +0800
Subject: major change: clearer param binding semantics; permit rebinding;
 enable resuming from previous training

---
 nerv/layer/window.lua | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/nerv/layer/window.lua b/nerv/layer/window.lua
index 4933de0..364929f 100644
--- a/nerv/layer/window.lua
+++ b/nerv/layer/window.lua
@@ -1,12 +1,15 @@
 local WindowLayer = nerv.class("nerv.WindowLayer", "nerv.Layer")
 
 function WindowLayer:__init(id, global_conf, layer_conf)
-    self.id = id
-    self.gconf = global_conf
-    self.window = layer_conf.window
-    self.dim_in = layer_conf.dim_in
-    self.dim_out = layer_conf.dim_out
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
     self:check_dim_len(1, 1)
+    self:bind_params()
+end
+
+function WindowLayer:bind_params()
+    self.window = self:find_param("window", self.lconf, self.gconf,
+                                  nerv.BiasParam,
+                                  {1, self.dim_out[1]})
 end
 
 function WindowLayer:init()
@@ -28,5 +31,5 @@ function WindowLayer:propagate(input, output)
 end
 
 function WindowLayer:get_params()
-    return nerv.ParamRepo({self.window})
+    return nerv.ParamRepo({self.window}, self.loc_type)
 end
-- 
cgit v1.2.3
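
The change moves all parameter lookup out of the constructor into bind_params, so a layer can be rebound against a different parameter repository, e.g. when resuming from a previously trained model. As a minimal illustrative sketch (not part of this commit), another layer written against the same convention could look like the following; it assumes only the calls visible in this diff (nerv.Layer.__init, check_dim_len, find_param, nerv.ParamRepo), and the layer name nerv.ScaleLayer and its "scale" parameter are hypothetical:

-- Hypothetical layer following the new binding convention (illustration only).
local ScaleLayer = nerv.class("nerv.ScaleLayer", "nerv.Layer")

function ScaleLayer:__init(id, global_conf, layer_conf)
    -- common fields (id, gconf, lconf, dim_in, dim_out) are set by the base class
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1)
    self:bind_params()
end

function ScaleLayer:bind_params()
    -- look up (or create) the "scale" parameter; calling bind_params again
    -- rebinds it, which is what allows resuming from previous training
    self.scale = self:find_param("scale", self.lconf, self.gconf,
                                 nerv.BiasParam,
                                 {1, self.dim_out[1]})
end

function ScaleLayer:get_params()
    return nerv.ParamRepo({self.scale}, self.loc_type)
end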