| author | Determinant <ted.sybil@gmail.com> | 2016-02-17 20:14:06 +0800 |
|---|---|---|
| committer | Determinant <ted.sybil@gmail.com> | 2016-02-17 20:14:06 +0800 |
| commit | 0ee43c21af4fcd3aed070b1f5ad1eb9feb2ad159 (patch) | |
| tree | ceb1d38328767fb657bc0d37ec6e513b08a86277 /nerv/tnn | |
| parent | 490a10c2130773bd022f05513fa2905b6a6c6e91 (diff) | |
try to merge manually
Diffstat (limited to 'nerv/tnn')
| -rw-r--r-- | nerv/tnn/init.lua | 1 |
| -rw-r--r-- | nerv/tnn/layersT/gru_t.lua | 114 |

2 files changed, 115 insertions(+), 0 deletions(-)
diff --git a/nerv/tnn/init.lua b/nerv/tnn/init.lua
index b375fa8..7faca31 100644
--- a/nerv/tnn/init.lua
+++ b/nerv/tnn/init.lua
@@ -47,5 +47,6 @@ nerv.include('sutil.lua')
 nerv.include('tnn.lua')
 nerv.include('layersT/softmax_ce_t.lua')
 nerv.include('layersT/lstm_t.lua')
+nerv.include('layersT/gru_t.lua')
 nerv.include('layersT/dropout_t.lua')
 nerv.include('layer_dag_t.lua')
diff --git a/nerv/tnn/layersT/gru_t.lua b/nerv/tnn/layersT/gru_t.lua
new file mode 100644
index 0000000..8f15cc8
--- /dev/null
+++ b/nerv/tnn/layersT/gru_t.lua
@@ -0,0 +1,114 @@
+local GRULayerT = nerv.class('nerv.GRULayerT', 'nerv.LayerT')
+
+function GRULayerT:__init(id, global_conf, layer_conf)
+    --input1:x input2:h
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+
+    if self.dim_in[2] ~= self.dim_out[1] then
+        nerv.error("dim_in[2](%d) mismatch with dim_out[1](%d)", self.dim_in[2], self.dim_out[1])
+    end
+
+    --prepare a DAGLayerT to hold the GRU structure
+    local pr = layer_conf.pr
+    if pr == nil then
+        pr = nerv.ParamRepo()
+    end
+
+    local function ap(str)
+        return self.id .. '.' .. str
+    end
+
+    local layers = {
+        ["nerv.CombinerLayer"] = {
+            [ap("inputXDup")] = {{}, {["dim_in"] = {self.dim_in[1]},
+                ["dim_out"] = {self.dim_in[1], self.dim_in[1], self.dim_in[1]}, ["lambda"] = {1}}},
+            [ap("inputHDup")] = {{}, {["dim_in"] = {self.dim_in[2]},
+                ["dim_out"] = {self.dim_in[2], self.dim_in[2], self.dim_in[2], self.dim_in[2], self.dim_in[2]}, ["lambda"] = {1}}},
+            [ap("updateGDup")] = {{}, {["dim_in"] = {self.dim_in[2]},
+                ["dim_out"] = {self.dim_in[2], self.dim_in[2]}, ["lambda"] = {1}}},
+            [ap("updateMergeL")] = {{}, {["dim_in"] = {self.dim_in[2], self.dim_in[2], self.dim_in[2]}, ["dim_out"] = {self.dim_out[1]},
+                ["lambda"] = {1, -1, 1}}},
+        },
+        ["nerv.AffineLayer"] = {
+            [ap("mainAffineL")] = {{}, {["dim_in"] = {self.dim_in[1], self.dim_in[2]}, ["dim_out"] = {self.dim_out[1]}, ["pr"] = pr}},
+        },
+        ["nerv.TanhLayer"] = {
+            [ap("mainTanhL")] = {{}, {["dim_in"] = {self.dim_out[1]}, ["dim_out"] = {self.dim_out[1]}}},
+        },
+        ["nerv.GateFLayer"] = {
+            [ap("resetGateL")] = {{}, {["dim_in"] = {self.dim_in[1], self.dim_in[2]},
+                ["dim_out"] = {self.dim_in[2]}, ["pr"] = pr}},
+            [ap("updateGateL")] = {{}, {["dim_in"] = {self.dim_in[1], self.dim_in[2]},
+                ["dim_out"] = {self.dim_in[2]}, ["pr"] = pr}},
+        },
+        ["nerv.ElemMulLayer"] = {
+            [ap("resetGMulL")] = {{}, {["dim_in"] = {self.dim_in[2], self.dim_in[2]}, ["dim_out"] = {self.dim_in[2]}}},
+            [ap("updateGMulCL")] = {{}, {["dim_in"] = {self.dim_in[2], self.dim_in[2]}, ["dim_out"] = {self.dim_in[2]}}},
+            [ap("updateGMulHL")] = {{}, {["dim_in"] = {self.dim_in[2], self.dim_in[2]}, ["dim_out"] = {self.dim_in[2]}}},
+        },
+    }
+
+    local layerRepo = nerv.LayerRepo(layers, pr, global_conf)
+
+    local connections_t = {
+        ["<input>[1]"] = ap("inputXDup[1]"),
+        ["<input>[2]"] = ap("inputHDup[1]"),
+
+        [ap("inputXDup[1]")] = ap("resetGateL[1]"),
+        [ap("inputHDup[1]")] = ap("resetGateL[2]"),
+        [ap("inputXDup[2]")] = ap("updateGateL[1]"),
+        [ap("inputHDup[2]")] = ap("updateGateL[2]"),
+        [ap("updateGateL[1]")] = ap("updateGDup[1]"),
+
+        [ap("resetGateL[1]")] = ap("resetGMulL[1]"),
+        [ap("inputHDup[3]")] = ap("resetGMulL[2]"),
+
+        [ap("inputXDup[3]")] = ap("mainAffineL[1]"),
+        [ap("resetGMulL[1]")] = ap("mainAffineL[2]"),
+        [ap("mainAffineL[1]")] = ap("mainTanhL[1]"),
+
+        [ap("updateGDup[1]")] = ap("updateGMulHL[1]"),
+        [ap("inputHDup[4]")] = ap("updateGMulHL[2]"),
+        [ap("updateGDup[2]")] = ap("updateGMulCL[1]"),
+        [ap("mainTanhL[1]")] = ap("updateGMulCL[2]"),
+
+        [ap("inputHDup[5]")] = ap("updateMergeL[1]"),
+        [ap("updateGMulHL[1]")] = ap("updateMergeL[2]"),
+        [ap("updateGMulCL[1]")] = ap("updateMergeL[3]"),
+
+        [ap("updateMergeL[1]")] = "<output>[1]",
+    }
+
+    self.dagL = nerv.DAGLayerT(self.id, global_conf,
+        {["dim_in"] = self.dim_in, ["dim_out"] = self.dim_out, ["sub_layers"] = layerRepo,
+         ["connections"] = connections_t})
+
+    self:check_dim_len(2, 1) -- inputs: x, h; output: h
+end
+
+function GRULayerT:init(batch_size, chunk_size)
+    self.dagL:init(batch_size, chunk_size)
+end
+
+function GRULayerT:batch_resize(batch_size, chunk_size)
+    self.dagL:batch_resize(batch_size, chunk_size)
+end
+
+function GRULayerT:update(bp_err, input, output, t)
+    self.dagL:update(bp_err, input, output, t)
+end
+
+function GRULayerT:propagate(input, output, t)
+    self.dagL:propagate(input, output, t)
+end
+
+function GRULayerT:back_propagate(bp_err, next_bp_err, input, output, t)
+    self.dagL:back_propagate(bp_err, next_bp_err, input, output, t)
+end
+
+function GRULayerT:get_params()
+    return self.dagL:get_params()
+end