Diffstat (limited to 'nerv/examples/lmptb/m-tests/dagl_test.lua')
-rw-r--r--  nerv/examples/lmptb/m-tests/dagl_test.lua  20
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/nerv/examples/lmptb/m-tests/dagl_test.lua b/nerv/examples/lmptb/m-tests/dagl_test.lua
index 02e9c49..9f45b6a 100644
--- a/nerv/examples/lmptb/m-tests/dagl_test.lua
+++ b/nerv/examples/lmptb/m-tests/dagl_test.lua
@@ -98,7 +98,7 @@ end
--global_conf: table
--layerRepo: nerv.LayerRepo
---Returns: a nerv.DAGLayer
+--Returns: a nerv.TDAGLayer
function prepare_dagLayer(global_conf, layerRepo)
printf("%s Initing daglayer ...\n", global_conf.sche_log_pre)
@@ -107,14 +107,14 @@ function prepare_dagLayer(global_conf, layerRepo)
dim_in_t[1] = 1 --input to select_linear layer
dim_in_t[2] = global_conf.vocab:size() --input to softmax label
local connections_t = {
- ["<input>[1]"] = "selectL1[1],0",
- ["selectL1[1]"] = "recurrentL1[1],0",
- ["recurrentL1[1]"] = "sigmoidL1[1],0",
- ["sigmoidL1[1]"] = "outputL[1],0",
- ["sigmoidL1[1]"] = "recurrentL1[2],1",
- ["outputL[1]"] = "softmaxL[1],0",
- ["<input>[2]"] = "softmaxL[2],0",
- ["softmaxL[1]"] = "<output>[1],0"
+ {"<input>[1]", "selectL1[1]", 0},
+ {"selectL1[1]", "recurrentL1[1]", 0},
+ {"recurrentL1[1]", "sigmoidL1[1]", 0},
+ {"sigmoidL1[1]", "outputL[1]", 0},
+ {"sigmoidL1[1]", "recurrentL1[2]", 1},
+ {"outputL[1]", "softmaxL[1]", 0},
+ {"<input>[2]", "softmaxL[2]", 0},
+ {"softmaxL[1]", "<output>[1]", 0}
}
--[[
@@ -127,7 +127,6 @@ function prepare_dagLayer(global_conf, layerRepo)
local dagL = nerv.TDAGLayer("dagL", global_conf, {["dim_in"] = dim_in_t, ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
["connections"] = connections_t,
})
- dagL:init(global_conf.batch_size)
printf("%s Initing DAGLayer end.\n", global_conf.sche_log_pre)
return dagL
end
@@ -162,3 +161,4 @@ global_conf.vocab:build_file(global_conf.train_fn, false)
local paramRepo = prepare_parameters(global_conf, true)
local layerRepo = prepare_layers(global_conf, paramRepo)
local dagL = prepare_dagLayer(global_conf, layerRepo)
+--dagL:init(global_conf.batch_size)
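Note on the change above: the rewritten connections_t uses ordered {from_port, to_port, time_shift} triples instead of a string-keyed map. The map form could not express two connections leaving the same port, since under Lua table semantics the second ["sigmoidL1[1]"] key silently overwrites the first; the list form keeps both edges (sigmoidL1 feeding outputL at shift 0 and recurrentL1 at shift 1). Below is a minimal, hypothetical sketch of consuming such triples with the "layer[port]" string convention seen in the diff; parse_port is illustrative only and not part of the nerv API:

-- Hypothetical sketch, not nerv code: walk {from, to, time_shift} triples.
local connections_t = {
    {"sigmoidL1[1]", "outputL[1]", 0},
    {"sigmoidL1[1]", "recurrentL1[2]", 1}, -- same source port, shifted one time step
}

-- Split a "layer[port]" string into its layer id and numeric port.
local function parse_port(s)
    local id, port = s:match("^(.+)%[(%d+)%]$")
    return id, tonumber(port)
end

for _, c in ipairs(connections_t) do
    local from_id, from_port = parse_port(c[1])
    local to_id, to_port = parse_port(c[2])
    print(string.format("%s.%d -> %s.%d (time shift %d)",
                        from_id, from_port, to_id, to_port, c[3]))
end

The sketch runs under a standalone Lua interpreter; it only demonstrates the port-string parsing, not TDAGLayer's actual internals, which this diff does not show.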