author    Qi Liu <[email protected]>  2016-03-11 20:11:00 +0800
committer Qi Liu <[email protected]>  2016-03-11 20:11:00 +0800
commit    e2a9af061db485d4388902d738c9d8be3f94ab34 (patch)
tree      468d6c6afa0801f6a6bf794b3674f8814b8827f7
parent    2f46a5e2b37a054f482f76f4ac3d26b144cf988f (diff)
add recipe and fix bugs
-rw-r--r--  nerv/Makefile                                                                          2
-rw-r--r--  nerv/examples/network_debug/config.lua (renamed from lua/config.lua)                  11
-rw-r--r--  nerv/examples/network_debug/main.lua (renamed from lua/main.lua)                       2
-rw-r--r--  nerv/examples/network_debug/network.lua (renamed from lua/network.lua)                 4
-rw-r--r--  nerv/examples/network_debug/reader.lua (renamed from lua/reader.lua)                   0
-rw-r--r--  nerv/examples/network_debug/select_linear.lua (renamed from lua/select_linear.lua)     5
-rw-r--r--  nerv/examples/network_debug/timer.lua (renamed from lua/timer.lua)                     0
-rw-r--r--  nerv/examples/network_debug/tnn.lua (renamed from lua/tnn.lua)                         0
-rw-r--r--  nerv/io/init.lua                                                                       3
-rw-r--r--  nerv/io/seq_buffer.lua                                                                 0
-rw-r--r--  nerv/layer/dropout.lua                                                                11
-rw-r--r--  nerv/layer/graph.lua                                                                   2
-rw-r--r--  nerv/layer/lstm.lua                                                                  191
-rw-r--r--  nerv/layer/rnn.lua                                                                    20
-rw-r--r--  nerv/matrix/init.lua                                                                  18
15 files changed, 113 insertions(+), 156 deletions(-)
diff --git a/nerv/Makefile b/nerv/Makefile
index 7921bd9..68465a1 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -44,7 +44,7 @@ LUA_LIBS := matrix/init.lua io/init.lua init.lua \
layer/elem_mul.lua layer/lstm.lua layer/lstm_gate.lua layer/dropout.lua layer/gru.lua \
layer/graph.lua layer/rnn.lua layer/duplicate.lua layer/identity.lua \
nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/network.lua \
- io/sgd_buffer.lua
+ io/sgd_buffer.lua io/seq_buffer.lua
INCLUDE := -I $(LUA_INCDIR) -DLUA_USE_APICHECK
CUDA_INCLUDE := -I $(CUDA_BASE)/include/
diff --git a/lua/config.lua b/nerv/examples/network_debug/config.lua
index ff98ae0..e20d5a9 100644
--- a/lua/config.lua
+++ b/nerv/examples/network_debug/config.lua
@@ -29,7 +29,6 @@ function get_layers(global_conf)
['nerv.SelectLinearLayer'] = {
['select'] = {dim_in = {1}, dim_out = {global_conf.hidden_size}, vocab = global_conf.vocab_size, pr = pr},
},
- ['nerv.CombinerLayer'] = {},
['nerv.AffineLayer'] = {
output = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.vocab_size}, pr = pr}
},
@@ -38,9 +37,8 @@ function get_layers(global_conf)
},
}
for i = 1, global_conf.layer_num do
- layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, pr = pr}
+ layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}, pr = pr}
layers['nerv.DropoutLayer']['dropout' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}}
- layers['nerv.CombinerLayer']['dup' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, lambda = {1}}
end
return layers
end
@@ -55,11 +53,8 @@ function get_connections(global_conf)
{'softmax[1]', '<output>[1]', 0},
}
for i = 1, global_conf.layer_num do
- table.insert(connections, {'lstm' .. i .. '[1]', 'dup' .. i .. '[1]', 0})
- table.insert(connections, {'lstm' .. i .. '[2]', 'lstm' .. i .. '[3]', 1})
- table.insert(connections, {'dup' .. i .. '[1]', 'lstm' .. i .. '[2]', 1})
- table.insert(connections, {'dup' .. i .. '[2]', 'dropout' .. i .. '[1]', 0})
- if i > 1 then
+ table.insert(connections, {'lstm' .. i .. '[1]', 'dropout' .. i .. '[1]', 0})
+ if i > 1 then
table.insert(connections, {'dropout' .. (i - 1) .. '[1]', 'lstm' .. i .. '[1]', 0})
end
end
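With LSTMLayer reduced to a single input and a single output (the recurrent h and c paths now live inside the layer, see nerv/layer/lstm.lua below), the stacking loop only has to chain lstm -> dropout -> next lstm. For illustration, a standalone sketch (plain Lua, no nerv required, not part of this commit) that mirrors the loop and prints the edges it would generate for a two-layer stack:

    -- Each edge is {from_port, to_port, time_delay}; all delays are 0 because the
    -- delay-1 recurrent links are internal to nerv.LSTMLayer after this commit.
    local function stack_edges(layer_num)
        local edges = {}
        for i = 1, layer_num do
            table.insert(edges, {'lstm' .. i .. '[1]', 'dropout' .. i .. '[1]', 0})
            if i > 1 then
                table.insert(edges, {'dropout' .. (i - 1) .. '[1]', 'lstm' .. i .. '[1]', 0})
            end
        end
        return edges
    end

    for _, e in ipairs(stack_edges(2)) do
        print(e[1], e[2], e[3])
    end
    -- lstm1[1]     dropout1[1]   0
    -- lstm2[1]     dropout2[1]   0
    -- dropout1[1]  lstm2[1]      0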
diff --git a/lua/main.lua b/nerv/examples/network_debug/main.lua
index 39818aa..790c404 100644
--- a/lua/main.lua
+++ b/nerv/examples/network_debug/main.lua
@@ -8,7 +8,7 @@ local timer = global_conf.timer
timer:tic('IO')
-local data_path = 'nerv/nerv/examples/lmptb/PTBdata/'
+local data_path = 'examples/lmptb/PTBdata/'
local train_reader = nerv.Reader(data_path .. 'vocab', data_path .. 'ptb.train.txt.adds')
local val_reader = nerv.Reader(data_path .. 'vocab', data_path .. 'ptb.valid.txt.adds')
diff --git a/lua/network.lua b/nerv/examples/network_debug/network.lua
index d106ba1..5518e27 100644
--- a/lua/network.lua
+++ b/nerv/examples/network_debug/network.lua
@@ -71,6 +71,7 @@ function nn:process(data, do_train)
local timer = self.gconf.timer
local total_err = 0
local total_frame = 0
+ self.network:epoch_init()
for id = 1, #data do
data[id].do_train = do_train
timer:tic('network')
@@ -101,6 +102,9 @@ end
function nn:epoch()
local train_error = self:process(self.train_data, true)
+ local tmp = self.gconf.dropout_rate
+ self.gconf.dropout_rate = 0
local val_error = self:process(self.val_data, false)
+ self.gconf.dropout_rate = tmp
return train_error, val_error
end
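The validation pass is wrapped in a save/restore of gconf.dropout_rate, which works because DropoutLayer now reads the rate from gconf at propagate time (see nerv/layer/dropout.lua below) rather than caching it at construction. The same idiom in isolation, as a hedged sketch (run_pass is a hypothetical stand-in for nn:process, not a nerv API):

    -- Temporarily override a field of a shared config table, run a pass, restore it.
    local function with_dropout_disabled(gconf, run_pass, ...)
        local saved = gconf.dropout_rate
        gconf.dropout_rate = 0               -- dropout layers then copy input through
        local ok, result = pcall(run_pass, ...)
        gconf.dropout_rate = saved           -- restore even if the pass raised an error
        if not ok then error(result) end
        return result
    end
    -- usage sketch:
    -- local val_err = with_dropout_disabled(gconf, function () return nn:process(val_data, false) end)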
diff --git a/lua/reader.lua b/nerv/examples/network_debug/reader.lua
index d2624d3..d2624d3 100644
--- a/lua/reader.lua
+++ b/nerv/examples/network_debug/reader.lua
diff --git a/lua/select_linear.lua b/nerv/examples/network_debug/select_linear.lua
index a7e20cc..91beedf 100644
--- a/lua/select_linear.lua
+++ b/nerv/examples/network_debug/select_linear.lua
@@ -5,10 +5,7 @@ local SL = nerv.class('nerv.SelectLinearLayer', 'nerv.Layer')
--layer_conf: table
--Get Parameters
function SL:__init(id, global_conf, layer_conf)
- self.id = id
- self.dim_in = layer_conf.dim_in
- self.dim_out = layer_conf.dim_out
- self.gconf = global_conf
+ nerv.Layer.__init(self, id, global_conf, layer_conf)
self.vocab = layer_conf.vocab
self.ltp = self:find_param("ltp", layer_conf, global_conf, nerv.LinearTransParam, {self.vocab, self.dim_out[1]}) --layer_conf.ltp
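The four hand-written field assignments are folded into the base-class constructor. nerv.Layer.__init is not part of this patch; presumably it records roughly the same state, something along these lines (an assumption for orientation only, not the authoritative definition):

    -- Assumed shape of the base constructor that replaces the removed assignments.
    function nerv.Layer:__init(id, global_conf, layer_conf)
        self.id = id
        self.gconf = global_conf
        self.dim_in = layer_conf.dim_in
        self.dim_out = layer_conf.dim_out
        -- plus framework bookkeeping such as self.loc_type, which other hunks rely on
    end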
diff --git a/lua/timer.lua b/nerv/examples/network_debug/timer.lua
index 2c54ca8..2c54ca8 100644
--- a/lua/timer.lua
+++ b/nerv/examples/network_debug/timer.lua
diff --git a/lua/tnn.lua b/nerv/examples/network_debug/tnn.lua
index bf9f118..bf9f118 100644
--- a/lua/tnn.lua
+++ b/nerv/examples/network_debug/tnn.lua
diff --git a/nerv/io/init.lua b/nerv/io/init.lua
index eb2e3e5..c36d850 100644
--- a/nerv/io/init.lua
+++ b/nerv/io/init.lua
@@ -52,8 +52,9 @@ function DataBuffer:__init(global_conf, buffer_conf)
nerv.error_method_not_implemented()
end
-function DataBuffer:get_batch()
+function DataBuffer:get_data()
nerv.error_method_not_implemented()
end
nerv.include('sgd_buffer.lua')
+nerv.include('seq_buffer.lua')
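DataBuffer's abstract entry point is renamed from get_batch to get_data, and the (still empty) seq_buffer.lua is registered with the io module. A concrete buffer honouring the renamed interface could look roughly like this (illustrative subclass and field names, not taken from this commit; assumes nerv.class accepts a parent class name, as it does elsewhere in this patch):

    -- Illustrative only: serves pre-built batches one at a time through get_data().
    local ListBuffer = nerv.class('nerv.ListBuffer', 'nerv.DataBuffer')

    function ListBuffer:__init(global_conf, buffer_conf)
        self.gconf = global_conf
        self.batches = buffer_conf.batches   -- assumed: an array of ready-made batches
        self.cursor = 0
    end

    function ListBuffer:get_data()
        self.cursor = self.cursor + 1
        return self.batches[self.cursor]     -- nil once the data is exhausted
    end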
diff --git a/nerv/io/seq_buffer.lua b/nerv/io/seq_buffer.lua
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/nerv/io/seq_buffer.lua
diff --git a/nerv/layer/dropout.lua b/nerv/layer/dropout.lua
index 1a379c9..39a8963 100644
--- a/nerv/layer/dropout.lua
+++ b/nerv/layer/dropout.lua
@@ -2,8 +2,7 @@ local DropoutLayer = nerv.class("nerv.DropoutLayer", "nerv.Layer")
function DropoutLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
- self.rate = layer_conf.dropout_rate or global_conf.dropout_rate
- if self.rate == nil then
+ if self.gconf.dropout_rate == nil then
nerv.warning("[DropoutLayer:propagate] dropout rate is not set")
end
self:check_dim_len(1, 1) -- one input and one output
@@ -41,12 +40,12 @@ function DropoutLayer:propagate(input, output, t)
if t == nil then
t = 1
end
- if self.rate then
+ if self.gconf.dropout_rate ~= 0 then
self.mask[t]:rand_uniform()
-- since we will lose a portion of the activations, we multiply the
-- activations by 1 / (1 - rate) to compensate
- self.mask[t]:thres_mask(self.mask[t], self.rate,
- 0, 1 / (1.0 - self.rate))
+ self.mask[t]:thres_mask(self.mask[t], self.gconf.dropout_rate,
+ 0, 1 / (1.0 - self.gconf.dropout_rate))
output[1]:mul_elem(input[1], self.mask[t])
else
output[1]:copy_fromd(input[1])
@@ -61,7 +60,7 @@ function DropoutLayer:back_propagate(bp_err, next_bp_err, input, output, t)
if t == nil then
t = 1
end
- if self.rate then
+ if self.gconf.dropout_rate ~= 0 then
next_bp_err[1]:mul_elem(bp_err[1], self.mask[t])
else
next_bp_err[1]:copy_fromd(bp_err[1])
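The layer now reads gconf.dropout_rate on every call instead of a rate frozen in the constructor, which is what lets network.lua zero it for the validation pass. The 1 / (1 - rate) factor is the usual inverted-dropout scaling: with keep probability 1 - rate, scaling the surviving activations by 1 / (1 - rate) leaves the expected value of each unit unchanged. A self-contained check in plain Lua (thres_mask is emulated per scalar here: values below rate become 0, the rest become the scale factor):

    math.randomseed(42)
    local rate, n, acc = 0.3, 1000000, 0
    for _ = 1, n do
        -- emulate rand_uniform + thres_mask(mask, rate, 0, 1 / (1 - rate)) for one element
        local mask = (math.random() < rate) and 0 or 1 / (1 - rate)
        acc = acc + mask * 1.0               -- an activation with value 1.0
    end
    print(acc / n)                           -- close to 1.0: the expectation is preserved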
diff --git a/nerv/layer/graph.lua b/nerv/layer/graph.lua
index 5f42fca..68d5f51 100644
--- a/nerv/layer/graph.lua
+++ b/nerv/layer/graph.lua
@@ -112,7 +112,7 @@ function GraphLayer:graph_init(layer_repo, connections)
end
for i = 1, #ref.dim_out do
if ref.outputs[i] == nil then
- nerv.error('dangling output port %d os layer %s', i, id)
+ nerv.error('dangling output port %d of layer %s', i, id)
end
end
end
diff --git a/nerv/layer/lstm.lua b/nerv/layer/lstm.lua
index 641d5dc..5dbcc20 100644
--- a/nerv/layer/lstm.lua
+++ b/nerv/layer/lstm.lua
@@ -1,144 +1,85 @@
-local LSTMLayer = nerv.class('nerv.LSTMLayer', 'nerv.Layer')
+local LSTMLayer = nerv.class('nerv.LSTMLayer', 'nerv.GraphLayer')
function LSTMLayer:__init(id, global_conf, layer_conf)
- -- input1:x
- -- input2:h
- -- input3:c
nerv.Layer.__init(self, id, global_conf, layer_conf)
- -- prepare a DAGLayer to hold the lstm structure
+ self:check_dim_len(1, 1)
+
+ local din = layer_conf.dim_in[1]
+ local dout = layer_conf.dim_out[1]
+
local pr = layer_conf.pr
if pr == nil then
pr = nerv.ParamRepo({}, self.loc_type)
end
-
- local function ap(str)
- return self.id .. '.' .. str
- end
- local din1, din2, din3 = self.dim_in[1], self.dim_in[2], self.dim_in[3]
- local dout1, dout2, dout3 = self.dim_out[1], self.dim_out[2], self.dim_out[3]
- local layers = {
- ["nerv.CombinerLayer"] = {
- [ap("inputXDup")] = {dim_in = {din1},
- dim_out = {din1, din1, din1, din1},
- lambda = {1}},
- [ap("inputHDup")] = {dim_in = {din2},
- dim_out = {din2, din2, din2, din2},
- lambda = {1}},
-
- [ap("inputCDup")] = {dim_in = {din3},
- dim_out = {din3, din3, din3},
- lambda = {1}},
-
- [ap("mainCDup")] = {dim_in = {din3, din3},
- dim_out = {din3, din3, din3},
- lambda = {1, 1}},
+ local layers = {
+ ['nerv.CombinerLayer'] = {
+ mainCombine = {dim_in = {dout, dout}, dim_out = {dout}, lambda = {1, 1}},
},
- ["nerv.AffineLayer"] = {
- [ap("mainAffineL")] = {dim_in = {din1, din2},
- dim_out = {dout1},
- pr = pr},
+ ['nerv.DuplicateLayer'] = {
+ inputDup = {dim_in = {din}, dim_out = {din, din, din, din}},
+ outputDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
+ cellDup = {dim_in = {dout}, dim_out = {dout, dout, dout, dout, dout}},
},
- ["nerv.TanhLayer"] = {
- [ap("mainTanhL")] = {dim_in = {dout1}, dim_out = {dout1}},
- [ap("outputTanhL")] = {dim_in = {dout1}, dim_out = {dout1}},
+ ['nerv.AffineLayer'] = {
+ mainAffine = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
},
- ["nerv.LSTMGateLayer"] = {
- [ap("forgetGateL")] = {dim_in = {din1, din2, din3},
- dim_out = {din3}, pr = pr},
- [ap("inputGateL")] = {dim_in = {din1, din2, din3},
- dim_out = {din3}, pr = pr},
- [ap("outputGateL")] = {dim_in = {din1, din2, din3},
- dim_out = {din3}, pr = pr},
-
+ ['nerv.TanhLayer'] = {
+ mainTanh = {dim_in = {dout}, dim_out = {dout}},
+ outputTanh = {dim_in = {dout}, dim_out = {dout}},
},
- ["nerv.ElemMulLayer"] = {
- [ap("inputGMulL")] = {dim_in = {din3, din3},
- dim_out = {din3}},
- [ap("forgetGMulL")] = {dim_in = {din3, din3},
- dim_out = {din3}},
- [ap("outputGMulL")] = {dim_in = {din3, din3},
- dim_out = {din3}},
+ ['nerv.LSTMGateLayer'] = {
+ forgetGate = {dim_in = {din, dout, dout}, dim_out = {dout}, pr = pr},
+ inputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, pr = pr},
+ outputGate = {dim_in = {din, dout, dout}, dim_out = {dout}, pr = pr},
+ },
+ ['nerv.ElemMulLayer'] = {
+ inputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
+ forgetGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
+ outputGateMul = {dim_in = {dout, dout}, dim_out = {dout}},
},
}
- self.lrepo = nerv.LayerRepo(layers, pr, global_conf)
-
local connections = {
- ["<input>[1]"] = ap("inputXDup[1]"),
- ["<input>[2]"] = ap("inputHDup[1]"),
- ["<input>[3]"] = ap("inputCDup[1]"),
-
- [ap("inputXDup[1]")] = ap("mainAffineL[1]"),
- [ap("inputHDup[1]")] = ap("mainAffineL[2]"),
- [ap("mainAffineL[1]")] = ap("mainTanhL[1]"),
-
- [ap("inputXDup[2]")] = ap("inputGateL[1]"),
- [ap("inputHDup[2]")] = ap("inputGateL[2]"),
- [ap("inputCDup[1]")] = ap("inputGateL[3]"),
-
- [ap("inputXDup[3]")] = ap("forgetGateL[1]"),
- [ap("inputHDup[3]")] = ap("forgetGateL[2]"),
- [ap("inputCDup[2]")] = ap("forgetGateL[3]"),
-
- [ap("mainTanhL[1]")] = ap("inputGMulL[1]"),
- [ap("inputGateL[1]")] = ap("inputGMulL[2]"),
-
- [ap("inputCDup[3]")] = ap("forgetGMulL[1]"),
- [ap("forgetGateL[1]")] = ap("forgetGMulL[2]"),
-
- [ap("inputGMulL[1]")] = ap("mainCDup[1]"),
- [ap("forgetGMulL[1]")] = ap("mainCDup[2]"),
-
- [ap("inputXDup[4]")] = ap("outputGateL[1]"),
- [ap("inputHDup[4]")] = ap("outputGateL[2]"),
- [ap("mainCDup[3]")] = ap("outputGateL[3]"),
-
- [ap("mainCDup[2]")] = "<output>[2]",
- [ap("mainCDup[1]")] = ap("outputTanhL[1]"),
-
- [ap("outputTanhL[1]")] = ap("outputGMulL[1]"),
- [ap("outputGateL[1]")] = ap("outputGMulL[2]"),
-
- [ap("outputGMulL[1]")] = "<output>[1]",
+ -- lstm input
+ {'<input>[1]', 'inputDup[1]', 0},
+
+ -- input gate
+ {'inputDup[1]', 'inputGate[1]', 0},
+ {'outputDup[1]', 'inputGate[2]', 1},
+ {'cellDup[1]', 'inputGate[3]', 1},
+
+ -- forget gate
+ {'inputDup[2]', 'forgetGate[1]', 0},
+ {'outputDup[2]', 'forgetGate[2]', 1},
+ {'cellDup[2]', 'forgetGate[3]', 1},
+
+ -- lstm cell
+ {'forgetGate[1]', 'forgetGateMul[1]', 0},
+ {'cellDup[3]', 'forgetGateMul[2]', 1},
+ {'inputDup[3]', 'mainAffine[1]', 0},
+ {'outputDup[3]', 'mainAffine[2]', 1},
+ {'mainAffine[1]', 'mainTanh[1]', 0},
+ {'inputGate[1]', 'inputGateMul[1]', 0},
+ {'mainTanh[1]', 'inputGateMul[2]', 0},
+ {'inputGateMul[1]', 'mainCombine[1]', 0},
+ {'forgetGateMul[1]', 'mainCombine[2]', 0},
+ {'mainCombine[1]', 'cellDup[1]', 0},
+
+ -- output gate
+ {'inputDup[4]', 'outputGate[1]', 0},
+ {'outputDup[4]', 'outputGate[2]', 1},
+ {'cellDup[4]', 'outputGate[3]', 0},
+
+ -- lstm output
+ {'cellDup[5]', 'outputTanh[1]', 0},
+ {'outputGate[1]', 'outputGateMul[1]', 0},
+ {'outputTanh[1]', 'outputGateMul[2]', 0},
+ {'outputGateMul[1]', 'outputDup[1]', 0},
+ {'outputDup[5]', '<output>[1]', 0},
}
- self.dag = nerv.DAGLayer(self.id, global_conf,
- {dim_in = self.dim_in,
- dim_out = self.dim_out,
- sub_layers = self.lrepo,
- connections = connections})
-
- self:check_dim_len(3, 2) -- x, h, c and h, c
-end
-
-function LSTMLayer:bind_params()
- local pr = layer_conf.pr
- if pr == nil then
- pr = nerv.ParamRepo({}, self.loc_type)
- end
- self.lrepo:rebind(pr)
-end
-
-function LSTMLayer:init(batch_size, chunk_size)
- self.dag:init(batch_size, chunk_size)
-end
-
-function LSTMLayer:batch_resize(batch_size, chunk_size)
- self.dag:batch_resize(batch_size, chunk_size)
-end
-
-function LSTMLayer:update(bp_err, input, output, t)
- self.dag:update(bp_err, input, output, t)
-end
-
-function LSTMLayer:propagate(input, output, t)
- self.dag:propagate(input, output, t)
-end
-
-function LSTMLayer:back_propagate(bp_err, next_bp_err, input, output, t)
- self.dag:back_propagate(bp_err, next_bp_err, input, output, t)
-end
-function LSTMLayer:get_params()
- return self.dag:get_params()
+ self:add_prefix(layers, connections)
+ local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
+ self:graph_init(layer_repo, connections)
end
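The layer is now a GraphLayer with one input port and one output port; the old explicit h and c ports are gone, and the recurrence is expressed by the delay-1 edges into outputDup and cellDup above. Declaring it therefore shrinks to a single-dimension spec, as in the updated network_debug config; a minimal sketch (assumes an existing nerv.ParamRepo in pr and hidden size H):

    local H = 300
    local layers = {
        ['nerv.LSTMLayer'] = {
            lstm1 = {dim_in = {H}, dim_out = {H}, pr = pr},
        },
    }
    -- before this commit the same entry needed the h and c ports spelled out:
    --     lstm1 = {dim_in = {H, H, H}, dim_out = {H, H}, pr = pr}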
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index e59cf5b..0b5ccaa 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -4,6 +4,10 @@ function RNNLayer:__init(id, global_conf, layer_conf)
nerv.Layer.__init(self, id, global_conf, layer_conf)
self:check_dim_len(1, 1)
+ if layer_conf.activation == nil then
+ layer_conf.activation = 'nerv.SigmoidLayer'
+ end
+
local din = layer_conf.dim_in[1]
local dout = layer_conf.dim_out[1]
@@ -16,20 +20,20 @@ function RNNLayer:__init(id, global_conf, layer_conf)
['nerv.AffineLayer'] = {
main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
},
- ['nerv.SigmoidLayer'] = {
- sigmoid = {dim_in = {dout}, dim_out = {dout}},
+ [layer_conf.activation] = {
+ activation = {dim_in = {dout}, dim_out = {dout}},
},
['nerv.DuplicateLayer'] = {
- dup = {dim_in = {dout}, dim_out = {dout, dout}},
- }
+ duplicate = {dim_in = {dout}, dim_out = {dout, dout}},
+ },
}
local connections = {
{'<input>[1]', 'main[1]', 0},
- {'main[1]', 'sigmoid[1]', 0},
- {'sigmoid[1]', 'dup[1]', 0},
- {'dup[1]', 'main[2]', 1},
- {'dup[2]', '<output>[1]', 0},
+ {'main[1]', 'activation[1]', 0},
+ {'activation[1]', 'duplicate[1]', 0},
+ {'duplicate[1]', 'main[2]', 1},
+ {'duplicate[2]', '<output>[1]', 0},
}
self:add_prefix(layers, connections)
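RNNLayer gains a configurable non-linearity that defaults to 'nerv.SigmoidLayer'. Choosing another one is a matter of setting layer_conf.activation to a layer class with a single input and output of the hidden size, e.g. nerv.TanhLayer, which the LSTM graph above already uses that way. A sketch of the corresponding repo entry (H and pr as in the earlier sketches, illustrative only):

    ['nerv.RNNLayer'] = {
        rnn1 = {dim_in = {H}, dim_out = {H}, pr = pr,
                activation = 'nerv.TanhLayer'},
    },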
diff --git a/nerv/matrix/init.lua b/nerv/matrix/init.lua
index cf85004..722c780 100644
--- a/nerv/matrix/init.lua
+++ b/nerv/matrix/init.lua
@@ -40,7 +40,8 @@ end
--- Assign each element in a matrix using the value returned by a callback `gen`.
-- @param gen the callback used to generated the values in the matrix, to which
-- the indices of row and column will be passed (e.g., `gen(i, j)`)
-function nerv.Matrix:generate(gen)
+
+function nerv.Matrix:_generate(gen)
if (self:dim() == 2) then
for i = 0, self:nrow() - 1 do
local row = self[i]
@@ -55,6 +56,21 @@ function nerv.Matrix:generate(gen)
end
end
+function nerv.Matrix:generate(gen)
+ local tmp
+ if nerv.is_type(self, 'nerv.CuMatrixFloat') then
+ tmp = nerv.MMatrixFloat(self:nrow(), self:ncol())
+ elseif nerv.is_type(self, 'nerv.CuMatrixDouble') then
+ tmp = nerv.MMatrixDouble(self:nrow(), self:ncol())
+ else
+ tmp = self
+ end
+ tmp:_generate(gen)
+ if nerv.is_type(self, 'nerv.CuMatrix') then
+ self:copy_fromh(tmp)
+ end
+end
+
--- Create a fresh new matrix of the same matrix type (as `self`).
-- @param nrow optional, the number of rows in the created matrix if specified,
-- otherwise `self:nrow()` will be used
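generate() is split in two: _generate keeps the element-wise Lua loop, and the public generate() detours CUDA matrices through a temporary host matrix so the callback can stay plain Lua, copying the result back with copy_fromh. Usage is unchanged from the caller's side; a hedged sketch (assumes the CuMatrixFloat constructor takes (nrow, ncol) like the MMatrix constructors used above, and that gen receives 0-based indices as in the loop above):

    local m = nerv.CuMatrixFloat(3, 4)
    m:generate(function (i, j) return i * 10 + j end)   -- filled on the host, then copied to the device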