From 03439902dbd339cfbbc684b6fcc6b1810fa02ede Mon Sep 17 00:00:00 2001 From: Qi Liu Date: Mon, 9 May 2016 20:51:10 +0800 Subject: fix bug in affine.lua --- nerv/nn/network.lua | 1 - 1 file changed, 1 deletion(-) (limited to 'nerv/nn/network.lua') diff --git a/nerv/nn/network.lua b/nerv/nn/network.lua index bf69ccc..d0d5462 100644 --- a/nerv/nn/network.lua +++ b/nerv/nn/network.lua @@ -416,7 +416,6 @@ function network:make_initial_store() local dim_in, dim_out = self.layers[i]:get_dim() for j = 1, #dim_in do if self.input[t][i][j] == nil then - print(t,i,j,self.layers[i].id) nerv.error('input reference dangling') end if self.err_output[t][i][j] == nil then -- cgit v1.2.3 From 4585970021f75d4c9e7154fc1681a80efa0f48ab Mon Sep 17 00:00:00 2001 From: Qi Liu Date: Mon, 9 May 2016 21:02:57 +0800 Subject: clip on gradient now --- nerv/nn/network.lua | 10 ---------- 1 file changed, 10 deletions(-) (limited to 'nerv/nn/network.lua') diff --git a/nerv/nn/network.lua b/nerv/nn/network.lua index d0d5462..358b100 100644 --- a/nerv/nn/network.lua +++ b/nerv/nn/network.lua @@ -33,8 +33,6 @@ local network = nerv.class('nerv.Network') -- -- * `network`: a `nerv.Layer` instance describing the structure of the network -- to be compiled --- * `clip`: a `number` value indicating the cliping threshold (i.e.
preserve --- the values within [-clip, +clip]) -- * `nn_act_default`: a `number` value indicating the value used for filling -- "holes" in activation values of a batch matrix (0 by default) @@ -49,7 +47,6 @@ function network:__init(id, global_conf, network_conf) else self.mat_type = self.gconf.cumat_type end - self.clip = network_conf.clip self.nn_act_default = network_conf.nn_act_default if self.nn_act_default == nil then self.nn_act_default = 0 @@ -660,13 +657,6 @@ function network:back_propagate() local t, id = self.queue[i].chunk, self.queue[i].id if t <= self.max_length then self.layers[id]:back_propagate(self.err_input[t][id], self.err_output[t][id], self.input[t][id], self.output[t][id], t) - -- gradient clip - if self.clip ~= nil then - local dim_in, _ = self.layers[id]:get_dim() - for j = 1, #dim_in do - self.err_output[t][id][j]:clip(-self.clip, self.clip) - end - end end -- flush border gradient if self.flush[t][id].timestamp == self.timestamp then -- cgit v1.2.3