diff options
author | Determinant <ted.sybil@gmail.com> | 2016-06-08 14:37:21 +0800 |
---|---|---|
committer | Determinant <ted.sybil@gmail.com> | 2016-06-08 14:37:21 +0800 |
commit | 1d7af85c108d0e87b986db2dd23d524735a8b279 (patch) | |
tree | 24ba36d82f092390e133fce0eb0abe6647bb73e0 /nerv/nn/network.lua | |
parent | b7cdd5da65a3e4ae58ffcfdf74710cfb1ee6327f (diff) | |
parent | d58b7adf0acd68921ec2d38d5929bf68406d4982 (diff) |
Merge remote-tracking branch 'lab/master'
Diffstat (limited to 'nerv/nn/network.lua')
-rw-r--r-- | nerv/nn/network.lua | 11 |
1 file changed, 0 insertions, 11 deletions
diff --git a/nerv/nn/network.lua b/nerv/nn/network.lua index bf69ccc..358b100 100644 --- a/nerv/nn/network.lua +++ b/nerv/nn/network.lua @@ -33,8 +33,6 @@ local network = nerv.class('nerv.Network') -- -- * `network`: a `nerv.Layer` instance describing the structure of the network -- to be compiled --- * `clip`: a `number` value indicating the cliping threshold (i.e. preserve --- the values within [-clip, +clip]) -- * `nn_act_default`: a `number` value indicating the value used for filling -- "holes" in activation values of a batch matrix (0 by default) @@ -49,7 +47,6 @@ function network:__init(id, global_conf, network_conf) else self.mat_type = self.gconf.cumat_type end - self.clip = network_conf.clip self.nn_act_default = network_conf.nn_act_default if self.nn_act_default == nil then self.nn_act_default = 0 @@ -416,7 +413,6 @@ function network:make_initial_store() local dim_in, dim_out = self.layers[i]:get_dim() for j = 1, #dim_in do if self.input[t][i][j] == nil then - print(t,i,j,self.layers[i].id) nerv.error('input reference dangling') end if self.err_output[t][i][j] == nil then @@ -661,13 +657,6 @@ function network:back_propagate() local t, id = self.queue[i].chunk, self.queue[i].id if t <= self.max_length then self.layers[id]:back_propagate(self.err_input[t][id], self.err_output[t][id], self.input[t][id], self.output[t][id], t) - -- gradient clip - if self.clip ~= nil then - local dim_in, _ = self.layers[id]:get_dim() - for j = 1, #dim_in do - self.err_output[t][id][j]:clip(-self.clip, self.clip) - end - end end -- flush border gradient if self.flush[t][id].timestamp == self.timestamp then |