-rw-r--r-- | nerv/examples/lmptb/tnn/tnn.lua | 3 |
1 file changed, 3 insertions, 0 deletions
diff --git a/nerv/examples/lmptb/tnn/tnn.lua b/nerv/examples/lmptb/tnn/tnn.lua
index db6cdd7..5448088 100644
--- a/nerv/examples/lmptb/tnn/tnn.lua
+++ b/nerv/examples/lmptb/tnn/tnn.lua
@@ -91,6 +91,9 @@ end
 
 function TNN:__init(id, global_conf, layer_conf)
     self.clip_t = layer_conf.clip_t
+    if self.clip_t == nil then
+        self.clip_t = 0
+    end
     if self.clip_t > 0 then
         nerv.info("tnn(%s) will clip gradient across time with %f...", id, self.clip_t)
     end
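The hunk defaults self.clip_t to 0 when layer_conf.clip_t is not set; without the guard, the following "self.clip_t > 0" comparison raises "attempt to compare nil with number" for configs that omit clip_t. Below is a minimal standalone sketch of the same guard in plain Lua, with a hypothetical clip_elements helper standing in for the actual per-timestep clipping done elsewhere in tnn.lua (not part of this commit):

    -- Sketch of the nil-guard added by this commit (plain Lua, no nerv dependencies).
    -- clip_elements is hypothetical; the real gradient clipping lives elsewhere in tnn.lua.
    local layer_conf = {}            -- clip_t intentionally left unset, as in a config without clipping

    local clip_t = layer_conf.clip_t
    if clip_t == nil then
        clip_t = 0                   -- default: no clipping across time
    end

    -- Clamp every entry of a gradient table to [-limit, limit].
    local function clip_elements(grad, limit)
        for i, v in ipairs(grad) do
            grad[i] = math.max(-limit, math.min(limit, v))
        end
    end

    local grad = {5.0, -12.0, 0.3}
    if clip_t > 0 then               -- safe to compare now; with the default of 0 this branch is skipped
        clip_elements(grad, clip_t)
    end
    print(table.concat(grad, ", "))  -- gradients unchanged because clip_t defaulted to 0

Defaulting to 0 keeps clipping opt-in: configs that never set clip_t behave exactly as before, and only a positive value triggers the nerv.info message and the clipping path.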