author    txh18 <cloudygooseg@gmail.com>  2015-11-24 00:14:21 +0800
committer txh18 <cloudygooseg@gmail.com>  2015-11-24 00:14:21 +0800
commit    f829b2b49d1db7fb6a49109722b9c7a41ae9324a (patch)
tree      606e8bb3a42d2b5e4660a0835993920d610a31c0 /nerv
parent    80b18045c2f7d0cc5aba5c4b852694d869c3f830 (diff)
still working on dagL_T
Diffstat (limited to 'nerv')
-rw-r--r--  nerv/examples/lmptb/tnn/layer_dag_t.lua  | 43
-rw-r--r--  nerv/examples/lmptb/unfold_ptb_main.lua  |  9
2 files changed, 29 insertions(+), 23 deletions(-)
diff --git a/nerv/examples/lmptb/tnn/layer_dag_t.lua b/nerv/examples/lmptb/tnn/layer_dag_t.lua
index 1a89816..cd5fba7 100644
--- a/nerv/examples/lmptb/tnn/layer_dag_t.lua
+++ b/nerv/examples/lmptb/tnn/layer_dag_t.lua
@@ -22,6 +22,7 @@ local function discover(id, layers, layer_repo)
local layer = layer_repo:get_layer(id)
local dim_in, dim_out = layer:get_dim()
ref = {
+ id = layer.id,
layer = layer,
inputs = {},
outputs = {},
@@ -51,26 +52,33 @@ function DAGLayerT:__init(id, global_conf, layer_conf)
local ref_from = discover(id_from, layers, layer_conf.sub_layers)
local ref_to = discover(id_to, layers, layer_conf.sub_layers)
local input_dim, output_dim, _
- if ref_from and ref_from.outputs[port_from] ~= nil then
- nerv.error("%s has already been attached", from)
- end
- if ref_to and ref_to.inputs[port_to] ~= nil then
- nerv.error("%s has already been attached", to)
- end
if id_from == "<input>" then
input_dim, _ = ref_to.layer:get_dim()
if dim_in[port_from] ~= input_dim[port_to] then
nerv.error("mismatching data dimension between %s and %s", from, to)
end
inputs[port_from] = {ref_to, port_to}
- ref_to.inputs[port_to] = inputs -- just a place holder
+ if ref_to.inputs[1] == nil then
+ ref_to.inputs[1] = {}
+ end
+ if ref_to.inputs[1][port_to] ~= nil then
+ nerv.error("port(%d) for layer(%s) already attached", port_to, to)
+ end
+ ref_to.inputs[1][port_to] = inputs -- just a place holder
elseif id_to == "<output>" then
_, output_dim = ref_from.layer:get_dim()
if output_dim[port_from] ~= dim_out[port_to] then
nerv.error("mismatching data dimension between %s and %s", from, to)
end
outputs[port_to] = {ref_from, port_from}
- ref_from.outputs[port_from] = outputs -- just a place holder
+ if ref_from.outputs[1] == nil then
+ ref_from.outputs[1] = {}
+ end
+ if ref_from.outputs[1][port_from] ~= nil then
+ nerv.error("port(%d) for layer(%s) already attached", port_from, from)
+ end
+ ref_from.outputs[1] = {}
+ ref_from.outputs[1][port_from] = outputs -- just a place holder
else
_, output_dim = ref_from.layer:get_dim()
input_dim, _ = ref_to.layer:get_dim()
@@ -134,10 +142,10 @@ function DAGLayerT:__init(id, global_conf, layer_conf)
end
function DAGLayerT:init(batch_size, chunk_size)
- nerv.printf("initing DAGLayerT\n")
+ nerv.info("initing DAGLayerT...\n")
if chunk_size == nil then
chunk_size = 1
- nerv.printf("(Initing DAGLayerT) chunk_size is nil, setting it to default 1\n")
+ nerv.info("(Initing DAGLayerT) chunk_size is nil, setting it to default 1\n")
end
self.chunk_size = chunk_size
@@ -179,12 +187,12 @@ function DAGLayerT:init(batch_size, chunk_size)
end
for id, ref in pairs(self.layers) do
for i = 1, ref.input_len do
- if ref.inputs[i] == nil then
+ if ref.inputs[1][i] == nil then --peek at time 1
nerv.error("dangling input port %d of layer %s", i, id)
end
end
for i = 1, ref.output_len do
- if ref.outputs[i] == nil then
+ if ref.outputs[1][i] == nil then --peek at time 1
nerv.error("dangling output port %d of layer %s", i, id)
end
end
@@ -258,6 +266,7 @@ function DAGLayerT:set_inputs(input, t)
end
local layer = self.inputs[i][1]
local port = self.inputs[i][2]
+
layer.inputs[t][port] = input[i]
end
end
@@ -296,10 +305,8 @@ function DAGLayerT:update(bp_err, input, output, t)
self:set_err_inputs(bp_err, t)
self:set_inputs(input, t)
self:set_outputs(output, t)
- -- print("update")
for id, ref in pairs(self.queue) do
- -- print(ref.layer.id)
- ref.layer:update(ref.err_inputs, ref.inputs, ref.outputs, t)
+ ref.layer:update(ref.err_inputs[t], ref.inputs[t], ref.outputs[t], t)
end
end
@@ -312,8 +319,7 @@ function DAGLayerT:propagate(input, output)
local ret = false
for i = 1, #self.queue do
local ref = self.queue[i]
- -- print(ref.layer.id)
- ret = ref.layer:propagate(ref.inputs, ref.outputs, t)
+ ret = ref.layer:propagate(ref.inputs[t], ref.outputs[t], t)
end
return ret
end
@@ -328,8 +334,7 @@ function DAGLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
self:set_outputs(output, t)
for i = #self.queue, 1, -1 do
local ref = self.queue[i]
- -- print(ref.layer.id)
- ref.layer:back_propagate(ref.err_inputs, ref.err_outputs, ref.inputs, ref.outputs, t)
+ ref.layer:back_propagate(ref.err_inputs[t], ref.err_outputs[t], ref.inputs[t], ref.outputs[t], t)
end
end
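The hunks above change DAGLayerT's port bookkeeping from flat per-port tables to per-time-step tables (ref.inputs[t][port], ref.outputs[t][port]), and update/propagate/back_propagate now hand each sub-layer only the tables for the current step t. Below is a minimal standalone sketch of the shape that bookkeeping takes, not code from the repository: the ref fields and the attachment check mirror the hunks above, while chunk_size, the placeholder layer, and plain error() are illustrative assumptions.

-- Sketch only (assumed values, no nerv dependency): per-time-step port
-- tables as implied by the layer_dag_t.lua hunks above.
local chunk_size = 3                    -- number of unrolled time steps (assumed)
local some_layer = {id = "sigmoidL1"}   -- placeholder for a real nerv layer

local ref = {
    id      = some_layer.id,
    layer   = some_layer,
    inputs  = {},   -- inputs[t][port]:  data fed to the layer at step t
    outputs = {},   -- outputs[t][port]: data produced by the layer at step t
}
for t = 1, chunk_size do
    ref.inputs[t]  = {}
    ref.outputs[t] = {}
end

-- Attaching a connection claims one (time, port) slot and refuses a second
-- attachment, mirroring the checks added above (which peek at time 1):
local port_to = 1
if ref.inputs[1][port_to] ~= nil then
    error(string.format("port(%d) for layer(%s) already attached", port_to, ref.id))
end
ref.inputs[1][port_to] = {}   -- just a placeholder, as in the hunk

-- During the per-step passes, only the step-t tables are passed through, e.g.
--   ref.layer:propagate(ref.inputs[t], ref.outputs[t], t)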
diff --git a/nerv/examples/lmptb/unfold_ptb_main.lua b/nerv/examples/lmptb/unfold_ptb_main.lua
index 6c4ead3..5affadf 100644
--- a/nerv/examples/lmptb/unfold_ptb_main.lua
+++ b/nerv/examples/lmptb/unfold_ptb_main.lua
@@ -8,6 +8,7 @@
require 'lmptb.lmvocab'
require 'lmptb.lmfeeder'
require 'lmptb.lmutil'
+require 'tnn.init'
nerv.include('lmptb/layer/init.lua')
--[[global function rename]]--
@@ -70,7 +71,7 @@ function prepare_layers(global_conf, paramRepo)
},
["nerv.SelectLinearLayer"] = {
- ["selectL1"] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}},
+ ["selectL1"] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab}},
},
["nerv.SigmoidLayer"] = {
@@ -90,7 +91,7 @@ function prepare_layers(global_conf, paramRepo)
for i = 1, global_conf.bptt do
layers["nerv.IndRecurrentLayer"]["recurrentL" .. (i + 1)] = recurrentLconfig
layers["nerv.SigmoidLayer"]["sigmoidL" .. (i + 1)] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}}
- layers["nerv.SelectLinearLayer"]["selectL" .. (i + 1)] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}}
+ layers["nerv.SelectLinearLayer"]["selectL" .. (i + 1)] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab}}
end
local layerRepo = nerv.LayerRepo(layers, paramRepo, global_conf)
printf("%s preparing layers end.\n", global_conf.sche_log_pre)
@@ -138,7 +139,7 @@ function prepare_dagLayer(global_conf, layerRepo)
printf("\t%s->%s\n", key, value)
end
- local dagL = nerv.DAGLayer("dagL", global_conf, {["dim_in"] = dim_in_t, ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
+ local dagL = nerv.DAGLayerT("dagL", global_conf, {["dim_in"] = dim_in_t, ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
["connections"] = connections_t,
})
dagL:init(global_conf.batch_size)
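The hunk above switches the driver script from nerv.DAGLayer to the time-aware nerv.DAGLayerT while keeping the dagL:init(global_conf.batch_size) call; since no chunk_size is passed, the DAGLayerT:init change shown earlier falls back to chunk_size = 1. A hedged sketch of that construction follows; dim_in_t, connections_t, layerRepo, and global_conf stand in for the values built earlier in unfold_ptb_main.lua and are not defined here.

-- Sketch only: DAGLayerT construction as wired up in the hunk above.
local dagL = nerv.DAGLayerT("dagL", global_conf, {
    ["dim_in"]      = dim_in_t,        -- input dimensions collected earlier
    ["dim_out"]     = {1},
    ["sub_layers"]  = layerRepo,       -- nerv.LayerRepo built in prepare_layers
    ["connections"] = connections_t,   -- <input>/<output> port wiring
})
dagL:init(global_conf.batch_size)      -- chunk_size omitted, so init defaults it to 1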
@@ -277,7 +278,7 @@ if (set == "ptb") then
global_conf = {
lrate = 1, wcost = 1e-6, momentum = 0,
cumat_type = nerv.CuMatrixFloat,
- mmat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
hidden_size = 200,
batch_size = 10,