-rw-r--r--  nerv/examples/lmptb/main.lua | 2 +-
-rw-r--r--  nerv/nn/layer_dag.lua        | 2 ++
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/nerv/examples/lmptb/main.lua b/nerv/examples/lmptb/main.lua
index 74ce407..9b39e83 100644
--- a/nerv/examples/lmptb/main.lua
+++ b/nerv/examples/lmptb/main.lua
@@ -268,7 +268,7 @@ if (set == "ptb") then
         hidden_size = 200,
         batch_size = 10,
-        bptt = 6, --train bptt_block's words. could be set to zero
+        bptt = 3, --train bptt_block's words. could be set to zero
         max_iter = 18,
         param_random = function() return (math.random() / 5 - 0.1) end,
         independent = true,
diff --git a/nerv/nn/layer_dag.lua b/nerv/nn/layer_dag.lua
index 73bb77d..91818d6 100644
--- a/nerv/nn/layer_dag.lua
+++ b/nerv/nn/layer_dag.lua
@@ -262,7 +262,9 @@ function DAGLayer:propagate(input, output)
     for i = 1, #self.queue do
         local ref = self.queue[i]
         -- print(ref.layer.id)
+        self.gconf.timer:tic("(propagate)"..ref.layer.id);
         ret = ref.layer:propagate(ref.inputs, ref.outputs)
+        self.gconf.timer:toc("(propagate)"..ref.layer.id);
     end
     return ret
 end
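The second hunk brackets each layer's propagate call with `tic`/`toc` calls on `self.gconf.timer`, so per-layer forward time is accumulated under a label like `(propagate)<layer id>`. The sketch below is a minimal, standalone illustration of that tic/toc accumulator pattern; the `Timer` class here is hypothetical and its interface (`new`, `tic`, `toc`, `report`) is assumed for illustration, not the actual nerv timer API.

```lua
-- Hypothetical tic/toc accumulating timer, sketched to mirror the
-- timing pattern added in DAGLayer:propagate above.
local Timer = {}
Timer.__index = Timer

function Timer.new()
    return setmetatable({starts = {}, totals = {}}, Timer)
end

function Timer:tic(name)
    -- remember when the labelled region started
    self.starts[name] = os.clock()
end

function Timer:toc(name)
    -- add the elapsed time for this label to its running total
    local start = self.starts[name]
    if start ~= nil then
        self.totals[name] = (self.totals[name] or 0) + (os.clock() - start)
        self.starts[name] = nil
    end
end

function Timer:report()
    for name, total in pairs(self.totals) do
        print(("%-30s %.4f s"):format(name, total))
    end
end

-- Usage mirroring the patched propagate loop (layer id is made up here):
local timer = Timer.new()
timer:tic("(propagate)affine0")
-- ref.layer:propagate(ref.inputs, ref.outputs) would run here
timer:toc("(propagate)affine0")
timer:report()
```

Because `toc` accumulates rather than overwrites, calling the pair once per layer per minibatch yields total time spent in each layer's forward pass over the whole epoch, which is the usual way such a timer is read out after training.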