author     mfy43 <ted.sybil@gmail.com>    2016-04-19 10:32:00 +0800
committer  mfy43 <ted.sybil@gmail.com>    2016-04-19 10:32:00 +0800
commit     8f6eb50628cadbd0e831d72c4528400b94431ee6 (patch)
tree       7cb32d6ee3ccf469bca1a16c067c1ae0732fb0bb
parent     c73636ba680cdb5b57602a1876a75c110b43d426 (diff)
parent     b5b6b57751d6bc3379c7379cc0ec5164fade8e3c (diff)
Merge branch 'master' into 'master'
add projection layer and lstmp layer

See merge request !1
-rw-r--r--  nerv/Makefile               1
-rw-r--r--  nerv/layer/init.lua         2
-rw-r--r--  nerv/layer/lstmp.lua       91
-rw-r--r--  nerv/layer/projection.lua  64
4 files changed, 158 insertions(+), 0 deletions(-)
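The new nerv.LSTMPLayer (added below) takes exactly one input and one output stream and keeps the cell width separate from the projected output width via cell_dim. As a rough orientation before the diff, a construction call might look like the following sketch; the gconf contents and all sizes here are illustrative assumptions, not values taken from this merge:

    local gconf = {}  -- placeholder for the usual nerv global_conf table (assumption)
    local lstmp = nerv.LSTMPLayer('lstmp1', gconf, {
        dim_in   = {620},    -- single input stream (check_dim_len(1, 1))
        dim_out  = {512},    -- projected output, fed back as the recurrent state
        cell_dim = 2048,     -- internal LSTM cell width, decoupled from dim_out
        -- pr = shared_repo  -- optional ParamRepo; a fresh one is created when nil
    })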
diff --git a/nerv/Makefile b/nerv/Makefile
index dde8fe7..f74a92f 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -40,6 +40,7 @@ OBJS := $(CORE_OBJS) $(NERV_OBJS) $(LUAT_OBJS)
LIBS := $(INST_LIBDIR)/libnerv.so $(LIB_PATH)/libnervcore.so $(LIB_PATH)/libluaT.so
LUA_LIBS := matrix/init.lua io/init.lua init.lua \
layer/init.lua layer/affine.lua layer/sigmoid.lua layer/tanh.lua layer/softmax_ce.lua layer/softmax.lua \
+ layer/lstmp.lua layer/projection.lua \
layer/window.lua layer/bias.lua layer/combiner.lua layer/mse.lua \
layer/elem_mul.lua layer/lstm.lua layer/lstm_gate.lua layer/dropout.lua layer/gru.lua \
layer/graph.lua layer/rnn.lua layer/duplicate.lua layer/identity.lua \
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 3a6cbcd..c893df3 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -152,6 +152,8 @@ nerv.include('gru.lua')
nerv.include('rnn.lua')
nerv.include('duplicate.lua')
nerv.include('identity.lua')
+nerv.include('projection.lua')
+nerv.include('lstmp.lua')
-- The following lines are for backward compatibility, and will be removed in
-- the future. The use of these names is deprecated.
diff --git a/nerv/layer/lstmp.lua b/nerv/layer/lstmp.lua
new file mode 100644
index 0000000..bbb2091
--- /dev/null
+++ b/nerv/layer/lstmp.lua
@@ -0,0 +1,91 @@
+local LSTMPLayer = nerv.class('nerv.LSTMPLayer', 'nerv.GraphLayer')
+
+function LSTMPLayer:__init(id, global_conf, layer_conf)
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
+ self:check_dim_len(1, 1)
+
+ local din = layer_conf.dim_in[1]
+ local dcell = layer_conf.cell_dim
+ local dout = layer_conf.dim_out[1]
+
+ local pr = layer_conf.pr
+ if pr == nil then
+ pr = nerv.ParamRepo({}, self.loc_type)
+ end
+
+ local layers = {
+ ['nerv.CombinerLayer'] = {
+ mainCombine = {dim_in = {dcell, dcell}, dim_out = {dcell}, lambda = {1, 1}},
+ },
+ ['nerv.DuplicateLayer'] = {
+ inputDup = {dim_in = {din}, dim_out = {din, din, din, din}},
+ outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
+ cellDup = {dim_in = {dcell}, dim_out = {dcell, dcell, dcell, dcell, dcell}},
+ },
+ ['nerv.AffineLayer'] = {
+ mainAffine = {dim_in = {din, dout}, dim_out = {dcell}, pr = pr},
+ },
+ ['nerv.TanhLayer'] = {
+ mainTanh = {dim_in = {dcell}, dim_out = {dcell}},
+ outputTanh = {dim_in = {dcell}, dim_out = {dcell}},
+ },
+ ['nerv.LSTMGateLayer'] = {
+ forgetGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr},
+ inputGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr},
+ outputGate = {dim_in = {din, dout, dcell}, dim_out = {dcell}, param_type = {'N', 'N', 'D'}, pr = pr},
+ },
+ ['nerv.ElemMulLayer'] = {
+ inputGateMul = {dim_in = {dcell, dcell}, dim_out = {dcell}},
+ forgetGateMul = {dim_in = {dcell, dcell}, dim_out = {dcell}},
+ outputGateMul = {dim_in = {dcell, dcell}, dim_out = {dcell}},
+ },
+ ['nerv.ProjectionLayer'] = {
+ projection = {dim_in = {dcell}, dim_out = {dout}, pr = pr},
+ },
+ }
+
+ local connections = {
+ -- lstm input
+ {'<input>[1]', 'inputDup[1]', 0},
+
+ -- input gate
+ {'inputDup[1]', 'inputGate[1]', 0},
+ {'outputDup[1]', 'inputGate[2]', 1},
+ {'cellDup[1]', 'inputGate[3]', 1},
+
+ -- forget gate
+ {'inputDup[2]', 'forgetGate[1]', 0},
+ {'outputDup[2]', 'forgetGate[2]', 1},
+ {'cellDup[2]', 'forgetGate[3]', 1},
+
+ -- lstm cell
+ {'forgetGate[1]', 'forgetGateMul[1]', 0},
+ {'cellDup[3]', 'forgetGateMul[2]', 1},
+ {'inputDup[3]', 'mainAffine[1]', 0},
+ {'outputDup[3]', 'mainAffine[2]', 1},
+ {'mainAffine[1]', 'mainTanh[1]', 0},
+ {'inputGate[1]', 'inputGateMul[1]', 0},
+ {'mainTanh[1]', 'inputGateMul[2]', 0},
+ {'inputGateMul[1]', 'mainCombine[1]', 0},
+ {'forgetGateMul[1]', 'mainCombine[2]', 0},
+ {'mainCombine[1]', 'cellDup[1]', 0},
+
+ -- output gate
+ {'inputDup[4]', 'outputGate[1]', 0},
+ {'outputDup[4]', 'outputGate[2]', 1},
+ {'cellDup[4]', 'outputGate[3]', 0},
+
+ -- lstm output
+ {'cellDup[5]', 'outputTanh[1]', 0},
+ {'outputGate[1]', 'outputGateMul[1]', 0},
+ {'outputTanh[1]', 'outputGateMul[2]', 0},
+ {'outputGateMul[1]', 'projection[1]', 0},
+ {'projection[1]', 'outputDup[1]', 0},
+ {'outputDup[5]', '<output>[1]', 0},
+ }
+
+ self:add_prefix(layers, connections)
+ local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
+ self.lrepo = layer_repo
+ self:graph_init(layer_repo, connections)
+end
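For reference, the graph wired up above realizes the usual peephole LSTM recurrence with a projection on the output path. Writing x_t for the input, c_t for the cell state and r_t for the projected output that is fed back with a one-step delay (bias terms omitted, and assuming nerv.LSTMGateLayer applies a sigmoid; the D_* factors are the diagonal peephole parameters marked 'D' in param_type):

    i_t = \sigma(W_i x_t + U_i r_{t-1} + D_i \odot c_{t-1})
    f_t = \sigma(W_f x_t + U_f r_{t-1} + D_f \odot c_{t-1})
    c_t = f_t \odot c_{t-1} + i_t \odot \tanh(W_c x_t + U_c r_{t-1})
    o_t = \sigma(W_o x_t + U_o r_{t-1} + D_o \odot c_t)
    r_t = W_p (o_t \odot \tanh(c_t))

The last line is where the new nerv.ProjectionLayer (below) enters: it maps the cell-sized vector down to dim_out before it is emitted and recycled into the gates.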
diff --git a/nerv/layer/projection.lua b/nerv/layer/projection.lua
new file mode 100644
index 0000000..d99401c
--- /dev/null
+++ b/nerv/layer/projection.lua
@@ -0,0 +1,64 @@
+local ProjectionLayer = nerv.class('nerv.ProjectionLayer', 'nerv.Layer')
+
+--- The constructor.
+function ProjectionLayer:__init(id, global_conf, layer_conf)
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
+ self:check_dim_len(-1, 1) -- exactly one output, allow multiple inputs
+ self:bind_params()
+end
+
+function ProjectionLayer:bind_params()
+ for i = 1, #self.dim_in do
+ local pid = "ltp" .. i
+ local pid_list = i == 1 and {pid, "ltp"} or pid
+ self["ltp" .. i] = self:find_param(pid_list, self.lconf, self.gconf,
+ nerv.LinearTransParam,
+ {self.dim_in[i], self.dim_out[1]})
+ end
+ self.ltp = self.ltp1 -- alias of ltp1
+end
+
+function ProjectionLayer:init(batch_size)
+ for i = 1, #self.dim_in do
+ if self.dim_in[i] ~= self["ltp" .. i].trans:nrow() then
+ nerv.error("mismatching dimensions of linear transform parameter and input")
+ end
+ if self.dim_out[1] ~= self["ltp" .. i].trans:ncol() then
+ nerv.error("mismatching dimensions of linear transform parameter and output")
+ end
+ self["ltp" .. i]:train_init()
+ end
+end
+
+function ProjectionLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
+function ProjectionLayer:update()
+ for i = 1, #self.dim_in do
+ self["ltp" .. i]:update_by_err_input()
+ end
+end
+
+function ProjectionLayer:propagate(input, output)
+ -- apply linear transform
+ output[1]:mul(input[1], self.ltp1.trans, 1.0, 0.0, 'N', 'N')
+ for i = 2, #self.dim_in do
+ output[1]:mul(input[i], self["ltp" .. i].trans, 1.0, 1.0, 'N', 'N')
+ end
+end
+
+function ProjectionLayer:back_propagate(bp_err, next_bp_err, input, output)
+ for i = 1, #self.dim_in do
+ next_bp_err[i]:mul(bp_err[1], self["ltp" .. i].trans, 1.0, 0.0, 'N', 'T')
+ self["ltp" .. i]:back_propagate_by_err_input(bp_err[1], input[i])
+ end
+end
+
+function ProjectionLayer:get_params()
+ local pr = nerv.ParamRepo({self.ltp1}, self.loc_type)
+ for i = 2, #self.dim_in do
+ pr:add(self["ltp" .. i].id, self["ltp" .. i])
+ end
+ return pr
+end
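As the code above shows, nerv.ProjectionLayer is a bias-free linear map: propagate() only accumulates input[i] * ltp_i into the single output. A minimal usage sketch with two input streams follows; gconf and the dimensions are illustrative assumptions, not taken from this merge:

    local gconf = {}  -- placeholder global_conf table (assumption)
    local proj = nerv.ProjectionLayer('proj1', gconf,
                                      {dim_in = {300, 200}, dim_out = {100}})
    -- bind_params() creates ltp1 (300x100) and ltp2 (200x100); propagate() then
    -- computes output[1] = input[1] * ltp1 + input[2] * ltp2, with no bias added.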