diff options
author | Qi Liu <[email protected]> | 2016-05-12 17:41:21 +0800 |
---|---|---|
committer | Qi Liu <[email protected]> | 2016-05-12 17:41:21 +0800 |
commit | c0fdb7ee2966546023410bb03e62dee0cf64e0e1 (patch) | |
tree | cd90688b8aee2991a78f971e1bc4e0e9596e357b /nerv/nn/network.lua | |
parent | d88a57f4852c50a2678de950ee650ed9b6a895f0 (diff) | |
parent | 10916f721a945a5edd052ab93027413fd3c01f65 (diff) |
Merge branch 'master' into 'master' beta-1.2
multiple changes
1. Merge affine & lstm_gate & projection layer
2. Change clip behavior
3. Seq_buffer supports sequence-level shuffle
4. LSTM & LSTMP layers support multiple inputs
See merge request !4
Diffstat (limited to 'nerv/nn/network.lua')
-rw-r--r-- | nerv/nn/network.lua | 11 |
1 file changed, 0 insertions, 11 deletions
diff --git a/nerv/nn/network.lua b/nerv/nn/network.lua index bf69ccc..358b100 100644 --- a/nerv/nn/network.lua +++ b/nerv/nn/network.lua @@ -33,8 +33,6 @@ local network = nerv.class('nerv.Network') -- -- * `network`: a `nerv.Layer` instance describing the structure of the network -- to be compiled --- * `clip`: a `number` value indicating the cliping threshold (i.e. preserve --- the values within [-clip, +clip]) -- * `nn_act_default`: a `number` value indicating the value used for filling -- "holes" in activation values of a batch matrix (0 by default) @@ -49,7 +47,6 @@ function network:__init(id, global_conf, network_conf) else self.mat_type = self.gconf.cumat_type end - self.clip = network_conf.clip self.nn_act_default = network_conf.nn_act_default if self.nn_act_default == nil then self.nn_act_default = 0 @@ -416,7 +413,6 @@ function network:make_initial_store() local dim_in, dim_out = self.layers[i]:get_dim() for j = 1, #dim_in do if self.input[t][i][j] == nil then - print(t,i,j,self.layers[i].id) nerv.error('input reference dangling') end if self.err_output[t][i][j] == nil then @@ -661,13 +657,6 @@ function network:back_propagate() local t, id = self.queue[i].chunk, self.queue[i].id if t <= self.max_length then self.layers[id]:back_propagate(self.err_input[t][id], self.err_output[t][id], self.input[t][id], self.output[t][id], t) - -- gradient clip - if self.clip ~= nil then - local dim_in, _ = self.layers[id]:get_dim() - for j = 1, #dim_in do - self.err_output[t][id][j]:clip(-self.clip, self.clip) - end - end end -- flush border gradient if self.flush[t][id].timestamp == self.timestamp then |