Diffstat (limited to 'nerv/examples/lmptb/rnn/layers/gate_fff.lua')
-rw-r--r--  nerv/examples/lmptb/rnn/layers/gate_fff.lua | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
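Summary: the file defines nerv.GateFFFLayer, but every method below was still declared on AffineLayer, the class this code was adapted from, so the definitions never attached to GateFFFLayer. The hunks rename each method receiver accordingly; a short sketch of the math these methods implement follows the diff.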
diff --git a/nerv/examples/lmptb/rnn/layers/gate_fff.lua b/nerv/examples/lmptb/rnn/layers/gate_fff.lua
index 74e19ce..6a588fc 100644
--- a/nerv/examples/lmptb/rnn/layers/gate_fff.lua
+++ b/nerv/examples/lmptb/rnn/layers/gate_fff.lua
@@ -1,6 +1,6 @@
 local GateFFFLayer = nerv.class('nerv.GateFFFLayer', 'nerv.Layer')
 
-function AffineLayer:__init(id, global_conf, layer_conf)
+function GateFFFLayer:__init(id, global_conf, layer_conf)
     self.id = id
     self.ltp = layer_conf.ltp
     self.bp = layer_conf.bp
@@ -10,7 +10,7 @@ function AffineLayer:__init(id, global_conf, layer_conf)
     self:check_dim_len(1, 1) -- exactly one input and one output
 end
 
-function AffineLayer:init(batch_size)
+function GateFFFLayer:init(batch_size)
     if self.ltp.trans:ncol() ~= self.bp.trans:ncol() then
         nerv.error("mismatching dimensions of linear transform and bias paramter")
     end
@@ -25,11 +25,11 @@ function AffineLayer:init(batch_size)
     self.bp:train_init()
 end
 
-function AffineLayer:batch_resize(batch_size)
+function GateFFFLayer:batch_resize(batch_size)
     -- do nothing
 end
 
-function AffineLayer:update(bp_err, input, output)
+function GateFFFLayer:update(bp_err, input, output)
     if self.direct_update == true then
         local gconf = self.gconf
         if gconf.momentum > 0 then
@@ -51,17 +51,17 @@ function AffineLayer:update(bp_err, input, output)
     end
 end
 
-function AffineLayer:propagate(input, output)
+function GateFFFLayer:propagate(input, output)
     -- apply linear transform
     output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
     -- add bias
     output[1]:add_row(self.bp.trans, 1.0)
 end
 
-function AffineLayer:back_propagate(bp_err, next_bp_err, input, output)
+function GateFFFLayer:back_propagate(bp_err, next_bp_err, input, output)
     next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
 end
 
-function AffineLayer:get_params()
+function GateFFFLayer:get_params()
     return nerv.ParamRepo({self.ltp, self.bp})
 end
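For reference, a minimal plain-Lua sketch (not part of the commit) of the math the renamed methods perform: propagate computes output = input * ltp + bias (mul with 'N', 'N', then add_row), and back_propagate computes next_bp_err = bp_err * ltp^T (mul with 'N', 'T'). The helper names and the nested-table matrix layout are illustrative assumptions; the real layer runs these as single nerv matrix kernels on GPU matrices.

-- affine_forward: what propagate does, on plain Lua tables.
-- input: rows x k, W: k x j, b: length-j bias added to every row (add_row).
local function affine_forward(input, W, b)
    local out = {}
    for i = 1, #input do
        out[i] = {}
        for j = 1, #W[1] do
            local s = b[j]                       -- bias term
            for k = 1, #W do
                s = s + input[i][k] * W[k][j]    -- linear transform
            end
            out[i][j] = s
        end
    end
    return out
end

-- affine_backward: what back_propagate does, next_bp_err = bp_err * W^T.
local function affine_backward(bp_err, W)
    local nerr = {}
    for i = 1, #bp_err do
        nerr[i] = {}
        for k = 1, #W do
            local s = 0
            for j = 1, #W[1] do
                s = s + bp_err[i][j] * W[k][j]   -- 'T' flag: use W transposed
            end
            nerr[i][k] = s
        end
    end
    return nerr
end

-- usage: a 1x2 batch through 2x2 identity weights plus a bias row
local out = affine_forward({{2, 3}}, {{1, 0}, {0, 1}}, {0.5, -0.5})
-- out is {{2.5, 2.5}}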