author     txh18 <cloudygooseg@gmail.com>    2015-12-06 19:31:38 +0800
committer  txh18 <cloudygooseg@gmail.com>    2015-12-06 19:31:38 +0800
commit     27cf4ef1965ec900bb15b32c2b8de65a65c0cd8b (patch)
tree       d61a4f3e1ea8f27338db99cac55980d7305eb10c
parent     4e3cf34a29afbdb17c42ab7f1efacec52efd0e8b (diff)
some log changes
-rw-r--r--  nerv/examples/lmptb/lm_trainer.lua  5
-rw-r--r--  nerv/tnn/layer_dag_t.lua            2
-rw-r--r--  nerv/tnn/tnn.lua                    2
3 files changed, 6 insertions, 3 deletions
diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua
index 0ccd847..3b8b5c3 100644
--- a/nerv/examples/lmptb/lm_trainer.lua
+++ b/nerv/examples/lmptb/lm_trainer.lua
@@ -53,7 +53,8 @@ function LMTrainer.lm_process_file_rnn(global_conf, fn, tnn, do_train, p_conf)
local next_log_wcn = global_conf.log_w_num
local neto_bakm = global_conf.mmat_type(batch_size, 1) --space backup matrix for network output
-
+
+ nerv.info("LMTrainer.lm_process_file_rnn: begin processing...")
while (1) do
global_conf.timer:tic('most_out_loop_lmprocessfile')
@@ -184,6 +185,8 @@ function LMTrainer.lm_process_file_birnn(global_conf, fn, tnn, do_train, p_conf)
local next_log_wcn = global_conf.log_w_num
local neto_bakm = global_conf.mmat_type(batch_size, 1) --space backup matrix for network output
+ nerv.info("LMTrainer.lm_process_file_birnn: begin processing...")
+
while (1) do
global_conf.timer:tic('most_out_loop_lmprocessfile')
diff --git a/nerv/tnn/layer_dag_t.lua b/nerv/tnn/layer_dag_t.lua
index e3a9316..b651f4e 100644
--- a/nerv/tnn/layer_dag_t.lua
+++ b/nerv/tnn/layer_dag_t.lua
@@ -142,7 +142,7 @@ function DAGLayerT:__init(id, global_conf, layer_conf)
end
function DAGLayerT:init(batch_size, chunk_size)
- nerv.info("initing DAGLayerT %s...\n", self.id)
+ nerv.info("initing DAGLayerT %s...", self.id)
if chunk_size == nil then
chunk_size = 1
nerv.info("(Initing DAGLayerT) chunk_size is nil, setting it to default 1\n")
diff --git a/nerv/tnn/tnn.lua b/nerv/tnn/tnn.lua
index beb73ca..cf02123 100644
--- a/nerv/tnn/tnn.lua
+++ b/nerv/tnn/tnn.lua
@@ -178,7 +178,7 @@ function TNN:init(batch_size, chunk_size)
nerv.error("layer %s has a zero dim port", ref_from.layer.id)
end
- print("TNN initing storage", ref_from.layer.id, "->", ref_to.layer.id)
+ nerv.info("TNN initing storage %s->%s", ref_from.layer.id, ref_to.layer.id)
ref_to.inputs_matbak_p[port_to] = self.gconf.cumat_type(batch_size, dim)
self.make_initial_store(ref_from.outputs_m, port_from, dim, batch_size, chunk_size, self.extend_t, self.gconf, ref_to.inputs_m, port_to, time)
ref_from.err_inputs_matbak_p[port_from] = self.gconf.cumat_type(batch_size, dim)
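
Note: the calls this commit switches to all follow the nerv.info printf-style pattern already visible in the hunks above (format string plus arguments, no manual concatenation). A minimal standalone sketch of that call shape is below; the fallback logger is a hypothetical stand-in so the snippet runs outside the nerv toolkit, not nerv's actual implementation.

-- Minimal sketch of the logging pattern this commit adopts.
-- `nerv.info` is part of the nerv toolkit; the table below is only a
-- hypothetical fallback so this runs in plain Lua.
local nerv = nerv or {
    info = function(fmt, ...) print("[info] " .. string.format(fmt, ...)) end
}

-- Same call shape as in TNN:init above: a format string with arguments.
-- The commit also drops the trailing "\n", suggesting the logger
-- terminates lines itself.
nerv.info("TNN initing storage %s->%s", "layer_from", "layer_to")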