 nerv/layer/dropout.lua | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/nerv/layer/dropout.lua b/nerv/layer/dropout.lua
index 39a8963..de0fb64 100644
--- a/nerv/layer/dropout.lua
+++ b/nerv/layer/dropout.lua
@@ -40,7 +40,7 @@ function DropoutLayer:propagate(input, output, t)
     if t == nil then
         t = 1
     end
-    if self.gconf.dropout_rate ~= 0 then
+    if self.gconf.dropout_rate then
         self.mask[t]:rand_uniform()
         -- since we will lose a portion of the actvations, we multiply the
         -- activations by 1 / (1 - rate) to compensate
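Why the guard changed: in Lua, an unset config field reads as nil, and nil ~= 0 evaluates to true, so the old check entered the dropout branch even when no dropout_rate was configured at all. The new truthiness check skips the branch in that case. A minimal sketch of the two checks (the bare gconf table here is a hypothetical stand-in for the layer's global config):

    -- assuming dropout_rate was never set, so the field is nil
    local gconf = {}

    if gconf.dropout_rate ~= 0 then
        print("old check: branch entered")  -- runs, since nil ~= 0 is true
    end

    if gconf.dropout_rate then
        print("new check: branch entered")  -- skipped, since nil is falsy
    end

One subtlety: 0 is truthy in Lua, so a rate explicitly set to 0 still enters the branch under the new check. That appears harmless here, since a zero rate masks nothing and the 1 / (1 - 0) compensation mentioned in the comment is a no-op, but it does mean the branch is only skipped when the rate is absent, not when it is zero.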