about summary refs log tree commit diff
path: root/nerv/layer
diff options
context:
space:
mode:
author: txh18 <cloudygooseg@gmail.com> 2015-11-20 21:49:33 +0800
committer: txh18 <cloudygooseg@gmail.com> 2015-11-20 21:49:33 +0800
commit: ddcb0a8f3ee045910acc618177dc5baf7adb8bf3 (patch)
tree: 0db82eb0b0ea0418803ed55c19dfc8eccc52b76a /nerv/layer
parent: 4f5b45b79b8d5f6a9094888cf6b929fe86ac24a3 (diff)
complete auto-generate params
Diffstat (limited to 'nerv/layer')
-rw-r--r-- nerv/layer/affine_recurrent.lua | 4
1 file changed, 2 insertions, 2 deletions
diff --git a/nerv/layer/affine_recurrent.lua b/nerv/layer/affine_recurrent.lua
index da189e0..d537f4a 100644
--- a/nerv/layer/affine_recurrent.lua
+++ b/nerv/layer/affine_recurrent.lua
@@ -10,8 +10,8 @@ function Recurrent:__init(id, global_conf, layer_conf)
self.dim_out = layer_conf.dim_out
self.gconf = global_conf
- self.bp = layer_conf.bp
- self.ltp_hh = layer_conf.ltp_hh --from hidden to hidden
+ self.bp = self:find_param("bp", layer_conf, global_conf, nerv.BiasParam, {1, self.dim_out[1]}) --layer_conf.bp
+ self.ltp_hh = self:find_param("ltp_hh", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[2], self.dim_out[1]}) --layer_conf.ltp_hh --from hidden to hidden
self:check_dim_len(2, 1)
self.direct_update = layer_conf.direct_update