path: root/nerv/layer/affine.lua
author    txh18 <[email protected]>    2015-10-23 19:36:31 +0800
committer txh18 <[email protected]>    2015-10-23 19:36:31 +0800
commit    1234c026869ab052e898cc2541143fe4a22312b6 (patch)
tree      bd4b980ae12340b4ea3a8aa6259d43dc891b5568 /nerv/layer/affine.lua
parent    f0937ae6e6401f25f15bb0e83e764ca888e81f11 (diff)
parent    64fce92b7845b716f3c168036691c37b2467d99b (diff)
Just come back, let's merge the new master
Merge branch 'master' into txh18/rnnlm
Diffstat (limited to 'nerv/layer/affine.lua')
-rw-r--r--  nerv/layer/affine.lua | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index 00cbcfb..015ec3f 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -42,7 +42,7 @@ function AffineLayer:__init(id, global_conf, layer_conf)
     self.dim_out = layer_conf.dim_out
     self.gconf = global_conf
     self:check_dim_len(1, 1) -- exactly one input and one output
-    self.direct_update = layer_conf.direct_update
+    self.direct_update = layer_conf.direct_update or global_conf.direct_update
 end
 
 function AffineLayer:init(batch_size)
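
Note on the hunk above: the `or` fallback means the layer picks up `global_conf.direct_update` whenever `layer_conf.direct_update` is nil. One subtlety of Lua's `a or b` idiom is worth keeping in mind; a minimal standalone sketch (the config tables below are illustrative, not from this commit):

    -- Hypothetical configs, only to illustrate the fallback semantics
    -- of the `a or b` idiom used in the diff above.
    local global_conf = { direct_update = true }
    local layer_conf  = {}   -- layer does not set direct_update

    -- nil is falsy, so the global value is picked up:
    print(layer_conf.direct_update or global_conf.direct_update)  -- true

    -- Caveat: an explicit `false` at the layer level is also falsy,
    -- so it cannot override a global `true` with this idiom:
    layer_conf.direct_update = false
    print(layer_conf.direct_update or global_conf.direct_update)  -- still true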
@@ -60,6 +60,10 @@ function AffineLayer:init(batch_size)
     self.bp:train_init()
 end
 
+function AffineLayer:batch_resize(batch_size)
+    -- do nothing
+end
+
 function AffineLayer:update(bp_err, input, output)
     if self.direct_update then
         self.ltp.correction:mul(input[1], bp_err[1], 1.0, gconf.momentum, 'T', 'N')
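
The new `batch_resize` hook is a deliberate no-op here: AffineLayer's parameters (`ltp`, `bp`) do not depend on the batch size, so there is nothing to reallocate. For contrast, a layer that keeps per-batch scratch buffers would resize them in this hook. A hypothetical sketch, assuming a nerv-style matrix constructor `gconf.cumat_type` and an `nrow()` accessor (the layer name and `buf` field are invented for illustration, not code from this repository):

    -- Hypothetical layer showing what batch_resize is for; not from this commit.
    function SomeBufferedLayer:batch_resize(batch_size)
        -- reallocate the per-batch scratch matrix only when the size changes
        if self.buf == nil or self.buf:nrow() ~= batch_size then
            self.buf = self.gconf.cumat_type(batch_size, self.dim_out[1])
        end
    end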