path: root/nerv/nn/trainer.lua
author    Qi Liu <liuq901@163.com>    2016-05-12 17:41:21 +0800
committer Qi Liu <liuq901@163.com>    2016-05-12 17:41:21 +0800
commit    c0fdb7ee2966546023410bb03e62dee0cf64e0e1 (patch)
tree      cd90688b8aee2991a78f971e1bc4e0e9596e357b /nerv/nn/trainer.lua
parent    d88a57f4852c50a2678de950ee650ed9b6a895f0 (diff)
parent    10916f721a945a5edd052ab93027413fd3c01f65 (diff)
Merge branch 'master' into 'master'  [beta-1.2]
Multiple changes:
1. Merge affine, lstm_gate & projection layers
2. Change clip behavior
3. SeqBuffer supports sequence-level shuffle
4. LSTM & LSTMP layers support multiple inputs

See merge request !4
Diffstat (limited to 'nerv/nn/trainer.lua')
-rw-r--r--    nerv/nn/trainer.lua    5
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/nerv/nn/trainer.lua b/nerv/nn/trainer.lua
index 44390ea..a17b36c 100644
--- a/nerv/nn/trainer.lua
+++ b/nerv/nn/trainer.lua
@@ -25,7 +25,7 @@ function trainer:__init(gconf)
     self.input_order = self:get_input_order()
     self.network = nerv.Network('network', gconf,
                                 {network = graph,
-                                 clip = gconf.clip})
+                                 nn_act_default = gconf.nn_act_default})
     local network = self.network
     network:init(gconf.batch_size, gconf.chunk_size)
@@ -77,9 +77,12 @@ function trainer:make_buffer(readers)
         })
     else
         return nerv.SeqBuffer(gconf, {
+            buffer_size = gconf.buffer_size,
             batch_size = gconf.batch_size,
             chunk_size = gconf.chunk_size,
+            randomize = gconf.randomize,
             readers = readers,
+            nn_act_default = gconf.nn_act_default,
         })
     end
 end
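
For context: both hunks simply forward fields from the global configuration table (gconf) into the constructors. A minimal sketch of a gconf carrying the options touched by this patch might look as follows; the concrete values, and any comment about their meaning, are illustrative assumptions rather than settings taken from the nerv source — only the field names themselves appear in the diff above.

-- Sketch of a global configuration table exercising the options this
-- patch forwards to nerv.Network and nerv.SeqBuffer. Values are
-- illustrative assumptions; only the field names come from the diff.
local gconf = {
    batch_size     = 32,    -- mini-batch rows (passed to network:init and SeqBuffer)
    chunk_size     = 1,     -- time steps unrolled per mini-batch
    buffer_size    = 81920, -- frames buffered by SeqBuffer (newly forwarded here)
    randomize      = true,  -- sequence-level shuffle in SeqBuffer (newly forwarded here)
    nn_act_default = 0,     -- default activation fill value (newly forwarded here)
}

Note that clip is no longer forwarded at network construction, consistent with item 2 ("Change clip behavior") in the merge description.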