Diffstat (limited to 'layer')
-rw-r--r--  layer/affine.lua  20
-rw-r--r--  layer/init.lua     2
2 files changed, 19 insertions, 3 deletions
diff --git a/layer/affine.lua b/layer/affine.lua
index 5f1b4ce..67c5854 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -10,16 +10,32 @@ function LinearTransParam:write(pfhandle)
     self.trans:new_to_host():save(pfhandle)
 end
 
-function AffineLayer:__init(id, ltp, bp)
+function AffineLayer:__init(id, global_conf, ltp, bp)
     self.ltp = ltp
     self.bp = bp
+    self.gconf = global_conf
+    -- linear transform correction
+    self.ltc = ltp:create()
+    self.ltc:fill(0)
+    -- bias correction
+    self.bc = bp:create()
+    self.bc:fill(0)
 end
 function nerv.AffineLayer:update(input, output)
+    -- momentum gain: rescales the step so the effective learning rate
+    -- is independent of the momentum coefficient
+    local mmt_gain = 1.0 / (1.0 - self.gconf.momentum)
+    local n = input:nrow() * mmt_gain
+    -- ltc =
 end
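
The body above stops at a work-in-progress stub ("-- ltc ="). A minimal
sketch of how the correction matrices set up in __init are typically used
to finish such a momentum update, assuming update also receives the error
signal bp_err (not part of this commit's signature) and assuming matrix
methods mul(a, b, alpha, beta, ta, tb), add(a, b, alpha, beta) and
colsum() with these meanings exist in this API:

    -- accumulate momentum-smoothed corrections:
    -- ltc = momentum * ltc + input^T * bp_err
    self.ltc:mul(input, bp_err, 1.0, self.gconf.momentum, 'T', 'N')
    -- bc = momentum * bc + column sums of bp_err
    self.bc:add(self.bc, bp_err:colsum(), self.gconf.momentum, 1.0)
    -- descend along the corrections, scaled by the momentum-adjusted rate
    self.ltp:add(self.ltp, self.ltc, 1.0, -self.gconf.lrate / n)
    self.bp:add(self.bp, self.bc, 1.0, -self.gconf.lrate / n)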
 function nerv.AffineLayer:propagate(input, output)
+    -- apply linear transform
+    output:mul(input, self.ltp, 'N', 'N')
+    -- add bias
+    output:add_row(self.bp, 1.0)
 end
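
Together, the two calls above compute the affine map output = input * ltp
+ bp, with the bias row bp added to every row of the product (hence
add_row).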
-function nerv.AffineLayer:back_propagate(input, output)
+function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
+    -- propagate the error through the linear transform:
+    -- next_bp_err = bp_err * ltp^T
+    next_bp_err:mul(bp_err, self.ltp, 'N', 'T')
 end
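
Note that with the new signature, back_propagate only threads the error to
the previous layer: next_bp_err = bp_err * ltp^T, the derivative of the
affine map with respect to its input. The parameter gradients themselves
would be consumed in update above.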
diff --git a/layer/init.lua b/layer/init.lua
index 62a2924..6923dbd 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -24,7 +24,7 @@ end
 local Layer = nerv.class('nerv.Layer')
 
-function nerv.Layer:__init(id, ...)
+function nerv.Layer:__init(id, global_conf, ...)
     nerv.error_method_not_implemented()
 end
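
With this change, every layer constructor receives the global configuration
as its second argument, matching the AffineLayer change above. A minimal
sketch of what a concrete subclass would look like under the new convention
(the SigmoidLayer name and its fields are illustrative only, not part of
this commit):

    local SigmoidLayer = nerv.class("nerv.SigmoidLayer", "nerv.Layer")

    function SigmoidLayer:__init(id, global_conf)
        self.id = id
        self.gconf = global_conf
    end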