Diffstat (limited to 'nerv/layer/projection.lua')
-rw-r--r--  nerv/layer/projection.lua  64
1 file changed, 64 insertions(+), 0 deletions(-)
diff --git a/nerv/layer/projection.lua b/nerv/layer/projection.lua
new file mode 100644
index 0000000..d99401c
--- /dev/null
+++ b/nerv/layer/projection.lua
@@ -0,0 +1,64 @@
+local ProjectionLayer = nerv.class('nerv.ProjectionLayer', 'nerv.Layer')
+
+--- The constructor.
+function ProjectionLayer:__init(id, global_conf, layer_conf)
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
+    self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
+    self:bind_params()
+end
+
+function ProjectionLayer:bind_params()
+    for i = 1, #self.dim_in do
+        local pid = "ltp" .. i
+        local pid_list = i == 1 and {pid, "ltp"} or pid -- the first transform may also be looked up as plain "ltp"
+        self["ltp" .. i] = self:find_param(pid_list, self.lconf, self.gconf,
+                                           nerv.LinearTransParam,
+                                           {self.dim_in[i], self.dim_out[1]})
+    end
+    self.ltp = self.ltp1 -- alias of ltp1
+end
+
+function ProjectionLayer:init(batch_size)
+    for i = 1, #self.dim_in do
+        if self.dim_in[i] ~= self["ltp" .. i].trans:nrow() then
+            nerv.error("mismatching dimensions of linear transform parameter and input")
+        end
+        if self.dim_out[1] ~= self["ltp" .. i].trans:ncol() then
+            nerv.error("mismatching dimensions of linear transform parameter and output")
+        end
+        self["ltp" .. i]:train_init()
+    end
+end
+
+function ProjectionLayer:batch_resize(batch_size)
+    -- do nothing
+end
+
+function ProjectionLayer:update()
+    for i = 1, #self.dim_in do
+        self["ltp" .. i]:update_by_err_input() -- apply the update recorded during back_propagate
+    end
+end
+
+function ProjectionLayer:propagate(input, output)
+    -- apply linear transform: output = sum_i input_i * ltp_i
+    output[1]:mul(input[1], self.ltp1.trans, 1.0, 0.0, 'N', 'N')
+    for i = 2, #self.dim_in do
+        output[1]:mul(input[i], self["ltp" .. i].trans, 1.0, 1.0, 'N', 'N') -- beta = 1.0 accumulates into output[1]
+    end
+end
+
+function ProjectionLayer:back_propagate(bp_err, next_bp_err, input, output)
+    for i = 1, #self.dim_in do
+        next_bp_err[i]:mul(bp_err[1], self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T') -- next_bp_err_i = bp_err * ltp_i^T
+        self["ltp" .. i]:back_propagate_by_err_input(bp_err[1], input[i])
+    end
+end
+
+function ProjectionLayer:get_params()
+    local pr = nerv.ParamRepo({self.ltp1}, self.loc_type)
+    for i = 2, #self.dim_in do
+        pr:add(self["ltp" .. i].id, self["ltp" .. i])
+    end
+    return pr
+end
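
For context, a minimal usage sketch, not part of this commit: the layer in effect computes output = input_1 * ltp_1 + ... + input_n * ltp_n, i.e. an affine transform without the bias term. The gconf fields, the ParamRepo arguments and the pr entry of layer_conf below follow common nerv conventions but are assumptions, not something this diff defines; all names and dimensions are placeholders.

    -- hypothetical example: project a 440-dim and a 256-dim input stream
    -- into a single 512-dim output
    local gconf = {lrate = 0.8, momentum = 0.9, wcost = 1e-6,
                   mat_type = nerv.CuMatrixFloat}                      -- assumed global settings
    local pr = nerv.ParamRepo({}, nerv.ParamRepo.LOC_TYPES.ON_DEVICE)  -- assumed location type
    local proj = nerv.ProjectionLayer("proj0", gconf,
                                      {dim_in = {440, 256},
                                       dim_out = {512},
                                       pr = pr})                       -- repo searched by find_param
    proj:init(256) -- verifies that ltp1 is 440x512 and ltp2 is 256x512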