diff options
author | txh18 <[email protected]> | 2016-02-16 19:42:29 +0800 |
---|---|---|
committer | txh18 <[email protected]> | 2016-02-16 19:42:29 +0800 |
commit | 490a10c2130773bd022f05513fa2905b6a6c6e91 (patch) | |
tree | a66d613c3c6cfc4054e799606f7fb56278281347 /nerv/layer/affine.lua | |
parent | 1721de3a5f5cd6df731484a8161b537468bea0bd (diff) |
fixed some minor problem
Diffstat (limited to 'nerv/layer/affine.lua')
-rw-r--r-- | nerv/layer/affine.lua | 8 |
1 files changed, 4 insertions, 4 deletions
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index d83b5f2..4156dde 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -85,15 +85,15 @@ function AffineLayer:__init(id, global_conf, layer_conf)
     end
     for i = 1, #self.dim_in do
         local pid = "ltp" .. i
-        local pid_list = i == 1 and {"ltp", pid} or pid
+        local pid_list = i == 1 and {pid, "ltp"} or pid
         self["ltp" .. i] = self:find_param(pid_list, layer_conf, global_conf,
             nerv.LinearTransParam,
-            {self.dim_in[i], self.dim_out[1]}, pid)
+            {self.dim_in[i], self.dim_out[1]})
     end
     self.ltp = self.ltp1 -- alias of ltp1
     self.bp = self:find_param("bp", layer_conf, global_conf, nerv.BiasParam,
-        {1, self.dim_out[1]}, "bp")
+        {1, self.dim_out[1]})
     self.gconf = global_conf
     self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
 end
@@ -142,7 +142,7 @@ function AffineLayer:back_propagate(bp_err, next_bp_err, input, output)
 end
 
 function AffineLayer:get_params()
-    local pr = nerv.ParamRepo({self.ltp, self.bp})
+    local pr = nerv.ParamRepo({self.ltp1, self.bp})
     for i = 2, #self.dim_in do
         pr:add(self["ltp" .. i].id, self["ltp" .. i])
     end