Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/affine.lua  9
1 file changed, 8 insertions(+), 1 deletion(-)
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index 015ec3f..0462383 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -31,7 +31,14 @@ function LinearTransParam:update(gradient)
     MatrixParam.update(self, gradient)
     local gconf = self.gconf
     -- weight decay
-    self.trans:add(self.trans, self.trans, 1.0, -gconf.lrate * gconf.wcost)
+    self.trans:add(self.trans, self.trans, 1.0, -gconf.lrate * gconf.wcost / gconf.batch_size)
+end
+
+function BiasParam:update(gradient)
+    MatrixParam.update(self, gradient)
+    local gconf = self.gconf
+    -- weight decay
+    self.trans:add(self.trans, self.trans, 1.0, -gconf.lrate * gconf.wcost / gconf.batch_size)
 end
 
 function AffineLayer:__init(id, global_conf, layer_conf)
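
For readers skimming the change: before this commit only LinearTransParam applied L2 weight decay in its update(), and the decay term was not scaled by the minibatch size; afterwards both LinearTransParam and the newly added BiasParam:update() decay their parameters, with the term divided by gconf.batch_size (the likely intent is to keep the decay on a per-sample scale when the gradient is accumulated over a minibatch). Below is a minimal standalone sketch of what the decay line does, assuming NERV's Matrix:add(a, b, alpha, beta) computes self = alpha*a + beta*b; the plain Lua tables and the helper name apply_weight_decay are illustrative only, not part of the repository.

-- Standalone sketch, not NERV code: plain Lua tables stand in for NERV matrices,
-- and apply_weight_decay is a hypothetical helper used only for illustration.
-- The call self.trans:add(self.trans, self.trans, 1.0, -lrate * wcost / batch_size)
-- is assumed to compute trans = 1.0 * trans + (-lrate * wcost / batch_size) * trans,
-- i.e. every entry shrinks by the factor (1 - lrate * wcost / batch_size).
local function apply_weight_decay(trans, gconf)
    local decay = gconf.lrate * gconf.wcost / gconf.batch_size
    for i, row in ipairs(trans) do
        for j, w in ipairs(row) do
            trans[i][j] = w * (1.0 - decay)
        end
    end
    return trans
end

-- usage: a 1x2 "matrix" with lrate = 0.1, wcost = 1e-3, batch_size = 10
local t = apply_weight_decay({{1.0, -2.0}}, {lrate = 0.1, wcost = 1e-3, batch_size = 10})
print(t[1][1], t[1][2])  --> 0.99999  -1.99998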