Diffstat (limited to 'nerv/examples/lmptb/unfold_ptb_main.lua')
-rw-r--r--  nerv/examples/lmptb/unfold_ptb_main.lua  9
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/nerv/examples/lmptb/unfold_ptb_main.lua b/nerv/examples/lmptb/unfold_ptb_main.lua
index 6c4ead3..5affadf 100644
--- a/nerv/examples/lmptb/unfold_ptb_main.lua
+++ b/nerv/examples/lmptb/unfold_ptb_main.lua
@@ -8,6 +8,7 @@
require 'lmptb.lmvocab'
require 'lmptb.lmfeeder'
require 'lmptb.lmutil'
+require 'tnn.init'
nerv.include('lmptb/layer/init.lua')
--[[global function rename]]--
@@ -70,7 +71,7 @@ function prepare_layers(global_conf, paramRepo)
},
["nerv.SelectLinearLayer"] = {
- ["selectL1"] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}},
+ ["selectL1"] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab}},
},
["nerv.SigmoidLayer"] = {
@@ -90,7 +91,7 @@ function prepare_layers(global_conf, paramRepo)
for i = 1, global_conf.bptt do
layers["nerv.IndRecurrentLayer"]["recurrentL" .. (i + 1)] = recurrentLconfig
layers["nerv.SigmoidLayer"]["sigmoidL" .. (i + 1)] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}}
- layers["nerv.SelectLinearLayer"]["selectL" .. (i + 1)] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}}
+ layers["nerv.SelectLinearLayer"]["selectL" .. (i + 1)] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab}}
end
local layerRepo = nerv.LayerRepo(layers, paramRepo, global_conf)
printf("%s preparing layers end.\n", global_conf.sche_log_pre)
@@ -138,7 +139,7 @@ function prepare_dagLayer(global_conf, layerRepo)
printf("\t%s->%s\n", key, value)
end
- local dagL = nerv.DAGLayer("dagL", global_conf, {["dim_in"] = dim_in_t, ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
+ local dagL = nerv.DAGLayerT("dagL", global_conf, {["dim_in"] = dim_in_t, ["dim_out"] = {1}, ["sub_layers"] = layerRepo,
["connections"] = connections_t,
})
dagL:init(global_conf.batch_size)
@@ -277,7 +278,7 @@ if (set == "ptb") then
global_conf = {
lrate = 1, wcost = 1e-6, momentum = 0,
cumat_type = nerv.CuMatrixFloat,
- mmat_type = nerv.CuMatrixFloat,
+ mmat_type = nerv.MMatrixFloat,
hidden_size = 200,
batch_size = 10,