author     Qi Liu <[email protected]>  2016-03-11 21:45:49 +0800
committer  Qi Liu <[email protected]>  2016-03-11 21:45:49 +0800
commit     2660af7f6a9ac243a8ad38bf3375ef0fd292bf52 (patch)
tree       6c8631af2e109f552cb9c80db8f521e6c5c2617c
parent     48e209f519e528c298e3471362451d6b0485abb8 (diff)
fix dropout bug
-rw-r--r--  nerv/layer/dropout.lua | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/nerv/layer/dropout.lua b/nerv/layer/dropout.lua
index 39a8963..de0fb64 100644
--- a/nerv/layer/dropout.lua
+++ b/nerv/layer/dropout.lua
@@ -40,7 +40,7 @@ function DropoutLayer:propagate(input, output, t)
     if t == nil then
         t = 1
     end
-    if self.gconf.dropout_rate ~= 0 then
+    if self.gconf.dropout_rate then
         self.mask[t]:rand_uniform()
         -- since we will lose a portion of the actvations, we multiply the
         -- activations by 1 / (1 - rate) to compensate
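Why the change fixes the bug: in Lua, nil ~= 0 evaluates to true, so when no dropout_rate was configured (i.e. it was nil) the old condition still entered the dropout branch and used a mask that was presumably never set up; checking the value's truthiness skips the branch whenever the rate is unset. (Note that 0 is truthy in Lua, so a rate explicitly set to 0 still takes the branch, where it harmlessly drops nothing.) Below is a minimal, self-contained sketch of the inverted-dropout idea the in-tree comment describes, in plain Lua on a plain table; dropout_forward and its table-based input are illustrative stand-ins and not NERV's actual matrix API.

-- Minimal inverted-dropout sketch (plain Lua, hypothetical API; not NERV's code).
-- Kept activations are rescaled by 1 / (1 - rate) so each unit's expected
-- value is unchanged, which is the compensation the patched comment refers to.
local function dropout_forward(input, rate)
    -- Guard mirrors the fixed condition: `rate` may be nil when dropout is
    -- disabled, and since nil ~= 0 is true in Lua, a plain `rate ~= 0` check
    -- would wrongly enter this branch (the bug this commit fixes).
    if not rate then
        return input
    end
    local output = {}
    for i, v in ipairs(input) do
        if math.random() >= rate then
            -- keep with probability (1 - rate), rescale to preserve expectation
            output[i] = v / (1 - rate)
        else
            output[i] = 0
        end
    end
    return output
end

-- Usage: with rate = nil the input passes through untouched,
-- matching the fixed guard's behavior when dropout is not configured.
local h = dropout_forward({0.5, 1.0, -0.3}, 0.5)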