Diffstat (limited to 'nerv/layer/dropout.lua')
-rw-r--r--    nerv/layer/dropout.lua    16
1 file changed, 6 insertions(+), 10 deletions(-)
diff --git a/nerv/layer/dropout.lua b/nerv/layer/dropout.lua
index 42660cc..1a379c9 100644
--- a/nerv/layer/dropout.lua
+++ b/nerv/layer/dropout.lua
@@ -1,22 +1,18 @@
local DropoutLayer = nerv.class("nerv.DropoutLayer", "nerv.Layer")

function DropoutLayer:__init(id, global_conf, layer_conf)
-    self.id = id
-    self.gconf = global_conf
-    if self.gconf.use_cpu then
-        self.mat_type = self.gconf.mmat_type
-    else
-        self.mat_type = self.gconf.cumat_type
-    end
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self.rate = layer_conf.dropout_rate or global_conf.dropout_rate
    if self.rate == nil then
        nerv.warning("[DropoutLayer:__init] dropout rate is not set")
    end
-    self.dim_in = layer_conf.dim_in
-    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1) -- one input, one output
end

+function DropoutLayer:bind_params()
+    -- do nothing
+end
+
function DropoutLayer:init(batch_size, chunk_size)
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("mismatching dimensions of input and output")
@@ -73,5 +69,5 @@ function DropoutLayer:back_propagate(bp_err, next_bp_err, input, output, t)
end

function DropoutLayer:get_params()
-    return nerv.ParamRepo({})
+    return nerv.ParamRepo({}, self.loc_type)
end
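
The net effect of the first hunk is that the per-layer bookkeeping moves into the shared nerv.Layer constructor. Below is a minimal sketch of what nerv.Layer.__init is assumed to do, reconstructed only from the lines removed in this patch; it is not the actual nerv.Layer source, and the loc_type field is inferred from the get_params change in the second hunk:

-- Sketch only: reconstructed from the fields DropoutLayer:__init used to set.
function nerv.Layer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    if self.gconf.use_cpu then
        self.mat_type = self.gconf.mmat_type
    else
        self.mat_type = self.gconf.cumat_type
    end
    -- self.loc_type is assumed to be set here too: a host/device marker
    -- that get_params() now passes to nerv.ParamRepo; its concrete values
    -- are not visible in this diff.
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
end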
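
The second hunk only threads self.loc_type into the empty ParamRepo that this parameter-free layer returns. A hedged usage sketch follows, assuming the construction pattern visible in this file; the gconf contents and the matrix class name nerv.MMatrixFloat are illustrative assumptions, not taken from the nerv test suite:

-- Hypothetical example; dropout_rate, dim_in and dim_out follow the fields
-- DropoutLayer:__init reads, nerv.MMatrixFloat is an assumed host matrix type.
local gconf = {use_cpu = true, mmat_type = nerv.MMatrixFloat, dropout_rate = 0.5}
local layer = nerv.DropoutLayer("drop1", gconf, {dim_in = {429}, dim_out = {429}})
-- DropoutLayer owns no trainable parameters, so get_params() returns an
-- empty ParamRepo, now tagged with the layer's location type (host here,
-- because use_cpu is true).
local repo = layer:get_params()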