about summary refs log tree commit diff
path: root/nerv/layer/affine.lua
diff options
context:
space:
mode:
Diffstat (limited to 'nerv/layer/affine.lua')
-rw-r--r--  nerv/layer/affine.lua  8
1 file changed, 4 insertions, 4 deletions
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index d83b5f2..4156dde 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -85,15 +85,15 @@ function AffineLayer:__init(id, global_conf, layer_conf)
end
for i = 1, #self.dim_in do
local pid = "ltp" .. i
- local pid_list = i == 1 and {"ltp", pid} or pid
+ local pid_list = i == 1 and {pid, "ltp"} or pid
self["ltp" .. i] = self:find_param(pid_list, layer_conf, global_conf,
nerv.LinearTransParam,
- {self.dim_in[i], self.dim_out[1]}, pid)
+ {self.dim_in[i], self.dim_out[1]})
end
self.ltp = self.ltp1 -- alias of ltp1
self.bp = self:find_param("bp", layer_conf, global_conf,
nerv.BiasParam,
- {1, self.dim_out[1]}, "bp")
+ {1, self.dim_out[1]})
self.gconf = global_conf
self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
end
@@ -142,7 +142,7 @@ function AffineLayer:back_propagate(bp_err, next_bp_err, input, output)
end
function AffineLayer:get_params()
- local pr = nerv.ParamRepo({self.ltp, self.bp})
+ local pr = nerv.ParamRepo({self.ltp1, self.bp})
for i = 2, #self.dim_in do
pr:add(self["ltp" .. i].id, self["ltp" .. i])
end