author    txh18 <[email protected]>  2015-11-20 21:49:33 +0800
committer txh18 <[email protected]>  2015-11-20 21:49:33 +0800
commit    ddcb0a8f3ee045910acc618177dc5baf7adb8bf3 (patch)
tree      0db82eb0b0ea0418803ed55c19dfc8eccc52b76a
parent    4f5b45b79b8d5f6a9094888cf6b929fe86ac24a3 (diff)
complete auto-generation of params
 nerv/examples/lmptb/lmptb/layer/select_linear.lua |  4
 nerv/examples/lmptb/rnn/layers/gate_fff.lua       | 14
 nerv/examples/lmptb/tnn_ptb_main.lua              | 16
 nerv/layer/affine_recurrent.lua                   |  4
 4 files changed, 21 insertions(+), 17 deletions(-)
diff --git a/nerv/examples/lmptb/lmptb/layer/select_linear.lua b/nerv/examples/lmptb/lmptb/layer/select_linear.lua
index e96296f..580b9c5 100644
--- a/nerv/examples/lmptb/lmptb/layer/select_linear.lua
+++ b/nerv/examples/lmptb/lmptb/layer/select_linear.lua
@@ -10,9 +10,9 @@ function SL:__init(id, global_conf, layer_conf)
self.dim_out = layer_conf.dim_out
self.gconf = global_conf
- self.ltp = layer_conf.ltp
self.vocab = layer_conf.vocab
-
+ self.ltp = self:find_param("ltp", layer_conf, global_conf, nerv.LinearTransParam, {self.vocab:size(), self.dim_out[1]}) --layer_conf.ltp
+
self:check_dim_len(1, 1)
end
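The select_linear.lua change above (and the affine_recurrent.lua change further down) replaces an explicit layer_conf binding with the layer's find_param helper, so parameters that are not supplied in the config get generated automatically. The sketch below is only an illustration of the assumed behaviour: check layer_conf first, then the global param repo, and otherwise generate a parameter of the requested shape. The repo accessors has_param/get_param/add and the id naming scheme are assumptions for illustration, not the actual nerv implementation.

    -- Illustrative sketch only; the real nerv.Layer:find_param may differ.
    -- Assumed lookup order: explicit binding in layer_conf, then the global
    -- param repo, then auto-generation with random initialization.
    function nerv.Layer:find_param(pid, lconf, gconf, p_type, p_dim)
        if lconf[pid] ~= nil then
            return lconf[pid]                      -- explicitly bound in the layer config
        end
        local pid_g = self.id .. '_' .. pid        -- assumed repo naming scheme
        if gconf.paramRepo:has_param(pid_g) then   -- has_param/get_param are assumed accessors
            return gconf.paramRepo:get_param(pid_g)
        end
        local p = p_type(pid_g, gconf)             -- e.g. nerv.LinearTransParam
        p.trans = gconf.cumat_type(unpack(p_dim))  -- allocate with the requested shape
        p.trans:generate(gconf.param_random)       -- random init, as in prepare_parameters
        gconf.paramRepo:add(pid_g, p)              -- assumed; lets paramRepo:export pick it up later
        return p
    end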
diff --git a/nerv/examples/lmptb/rnn/layers/gate_fff.lua b/nerv/examples/lmptb/rnn/layers/gate_fff.lua
index 74e19ce..6a588fc 100644
--- a/nerv/examples/lmptb/rnn/layers/gate_fff.lua
+++ b/nerv/examples/lmptb/rnn/layers/gate_fff.lua
@@ -1,6 +1,6 @@
local GateFFFLayer = nerv.class('nerv.GateFFFLayer', 'nerv.Layer')
-function AffineLayer:__init(id, global_conf, layer_conf)
+function GateFFFLayer:__init(id, global_conf, layer_conf)
self.id = id
self.ltp = layer_conf.ltp
self.bp = layer_conf.bp
@@ -10,7 +10,7 @@ function AffineLayer:__init(id, global_conf, layer_conf)
self:check_dim_len(1, 1) -- exactly one input and one output
end
-function AffineLayer:init(batch_size)
+function GateFFFLayer:init(batch_size)
if self.ltp.trans:ncol() ~= self.bp.trans:ncol() then
nerv.error("mismatching dimensions of linear transform and bias paramter")
end
@@ -25,11 +25,11 @@ function AffineLayer:init(batch_size)
self.bp:train_init()
end
-function AffineLayer:batch_resize(batch_size)
+function GateFFFLayer:batch_resize(batch_size)
-- do nothing
end
-function AffineLayer:update(bp_err, input, output)
+function GateFFFLayer:update(bp_err, input, output)
if self.direct_update == true then
local gconf = self.gconf
if gconf.momentum > 0 then
@@ -51,17 +51,17 @@ function AffineLayer:update(bp_err, input, output)
end
end
-function AffineLayer:propagate(input, output)
+function GateFFFLayer:propagate(input, output)
-- apply linear transform
output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
-- add bias
output[1]:add_row(self.bp.trans, 1.0)
end
-function AffineLayer:back_propagate(bp_err, next_bp_err, input, output)
+function GateFFFLayer:back_propagate(bp_err, next_bp_err, input, output)
next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
end
-function AffineLayer:get_params()
+function GateFFFLayer:get_params()
return nerv.ParamRepo({self.ltp, self.bp})
end
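Note that GateFFFLayer still reads its parameters directly from layer_conf rather than through find_param, so a declaration for it would still carry explicit bindings. A hypothetical entry for the tnn_ptb_main.lua layers table (the ids gateL1, ltp_g and bp_g are made up for illustration) could look like:

    ["nerv.GateFFFLayer"] = {
        ["gateL1"] = {{["ltp"] = "ltp_g", ["bp"] = "bp_g"},
                      {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}}},
    },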
diff --git a/nerv/examples/lmptb/tnn_ptb_main.lua b/nerv/examples/lmptb/tnn_ptb_main.lua
index 6afecbf..3096a3f 100644
--- a/nerv/examples/lmptb/tnn_ptb_main.lua
+++ b/nerv/examples/lmptb/tnn_ptb_main.lua
@@ -21,7 +21,10 @@ function prepare_parameters(global_conf, iter)
local paramRepo = global_conf.paramRepo
if iter == -1 then --first time
- printf("%s first time, generating parameters...\n", global_conf.sche_log_pre)
+ printf("%s first time, prepare some pre-set parameters, and leaving other parameters to auto-generation...\n", global_conf.sche_log_pre)
+ local f = nerv.ChunkFile(global_conf.param_fn .. '.0', 'w')
+ f:close()
+ --[[
ltp_ih = nerv.LinearTransParam("ltp_ih", global_conf)
ltp_ih.trans = global_conf.cumat_type(global_conf.vocab:size(), global_conf.hidden_size) --index 0 is for zero, others correspond to vocab index(starting from 1)
ltp_ih.trans:generate(global_conf.param_random)
@@ -49,7 +52,7 @@ function prepare_parameters(global_conf, iter)
f:write_chunk(bp_h)
--f:write_chunk(bp_o)
f:close()
-
+ ]]--
return nil
end
@@ -71,7 +74,7 @@ function prepare_layers(global_conf)
local du = false
--local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["break_id"] = global_conf.vocab:get_sen_entry().id, ["independent"] = global_conf.independent, ["clip"] = 10}}
- local recurrentLconfig = {{["bp"] = "bp_h", ["ltp_hh"] = "ltp_hh"}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du}}
+ local recurrentLconfig = {{}, {["dim_in"] = {global_conf.hidden_size, global_conf.hidden_size}, ["dim_out"] = {global_conf.hidden_size}, ["clip"] = 10, ["direct_update"] = du}}
local layers = {
["nerv.AffineRecurrentLayer"] = {
@@ -79,7 +82,7 @@ function prepare_layers(global_conf)
},
["nerv.SelectLinearLayer"] = {
- ["selectL1"] = {{["ltp"] = "ltp_ih"}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}}},
+ ["selectL1"] = {{}, {["dim_in"] = {1}, ["dim_out"] = {global_conf.hidden_size}, ["vocab"] = global_conf.vocab}},
},
["nerv.SigmoidLayer"] = {
@@ -91,7 +94,7 @@ function prepare_layers(global_conf)
},
["nerv.AffineLayer"] = {
- ["outputL"] = {{["ltp"] = "ltp_ho", ["bp"] = "bp_o"}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du}},
+ ["outputL"] = {{}, {["dim_in"] = {global_conf.hidden_size}, ["dim_out"] = {global_conf.vocab:size()}, ["direct_update"] = du}},
},
["nerv.SoftmaxCELayerT"] = {
@@ -296,12 +299,13 @@ global_conf.vocab:build_file(global_conf.vocab_fn, false)
ppl_rec = {}
if start_iter == -1 then
- prepare_parameters(global_conf, -1) --randomly generate parameters
+ prepare_parameters(global_conf, -1) --write pre-generated params to the param.0 file
end
if start_iter == -1 or start_iter == 0 then
print("===INITIAL VALIDATION===")
local tnn = load_net(global_conf, 0)
+ global_conf.paramRepo:export(global_conf.param_fn .. '.0', nil) --some params are auto-generated, so save the repo back to the param.0 file
local result = LMTrainer.lm_process_file(global_conf, global_conf.valid_fn, tnn, false) --false update!
nerv.LMUtil.wait(1)
ppl_rec[0] = {}
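Taken together, the tnn_ptb_main.lua changes replace explicit parameter generation with the start-up flow outlined below; this only restates the calls visible in the diff above in one place.

    -- 1. iteration -1: write an (almost) empty chunk file so later loads succeed
    local f = nerv.ChunkFile(global_conf.param_fn .. '.0', 'w')
    f:close()
    -- 2. build the net; layers with empty bindings ({}) obtain their params
    --    via find_param, auto-generating any that are missing
    local tnn = load_net(global_conf, 0)
    -- 3. export the now-complete repo so iteration 1 starts from param.0
    global_conf.paramRepo:export(global_conf.param_fn .. '.0', nil)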
diff --git a/nerv/layer/affine_recurrent.lua b/nerv/layer/affine_recurrent.lua
index da189e0..d537f4a 100644
--- a/nerv/layer/affine_recurrent.lua
+++ b/nerv/layer/affine_recurrent.lua
@@ -10,8 +10,8 @@ function Recurrent:__init(id, global_conf, layer_conf)
self.dim_out = layer_conf.dim_out
self.gconf = global_conf
- self.bp = layer_conf.bp
- self.ltp_hh = layer_conf.ltp_hh --from hidden to hidden
+ self.bp = self:find_param("bp", layer_conf, global_conf, nerv.BiasParam, {1, self.dim_out[1]}) --layer_conf.bp
+ self.ltp_hh = self:find_param("ltp_hh", layer_conf, global_conf, nerv.LinearTransParam, {self.dim_in[2], self.dim_out[1]}) --layer_conf.ltp_hh --from hidden to hidden
self:check_dim_len(2, 1)
self.direct_update = layer_conf.direct_update