diff options
author | Determinant <[email protected]> | 2016-03-26 15:23:58 +0800 |
---|---|---|
committer | Determinant <[email protected]> | 2016-03-26 15:23:58 +0800 |
commit | 86dbfcfd490ce3f8fd4591b0950fbea7f1826c70 (patch) | |
tree | b14298e8a020ab110af8cf667e1bb7c01bea693c /nerv/nn | |
parent | 38a2afc7d9c50859e99e09f4f64af3a4254f6f37 (diff) |
fix "not implemented" and lstm rebinding bugs (tag: alpha-3.1)
Diffstat (limited to 'nerv/nn')
-rw-r--r-- | nerv/nn/layer_repo.lua | 17 |
1 file changed, 14 insertions, 3 deletions
diff --git a/nerv/nn/layer_repo.lua b/nerv/nn/layer_repo.lua index acef54a..647aac9 100644 --- a/nerv/nn/layer_repo.lua +++ b/nerv/nn/layer_repo.lua @@ -29,10 +29,21 @@ function LayerRepo:add_layers(layer_spec, param_repo, global_conf) end function LayerRepo:rebind(param_repo) - for id, layer in pairs(self.layers) do - layer.lconf.pr = param_repo - layer:bind_params() + if self.__rebinding then + return end + self.__rebinding = true + for _, layer in pairs(self.layers) do + if not layer.__already_rebound then + layer.__already_rebound = true + layer.lconf.pr = param_repo + layer:bind_params() + end + end + for _, layer in pairs(self.layers) do + layer.__already_rebound = false + end + self.__rebinding = false end function LayerRepo:get_layer(lid) |