author | txh18 <[email protected]> | 2015-11-30 15:17:14 +0800
---|---|---
committer | txh18 <[email protected]> | 2015-11-30 15:17:14 +0800
commit | e6ea10bd32cef61565206358a104d1b17ba162f7 (patch) |
tree | 709f6031c6501ba7eba3ef30bf31b8586391fa56 |
parent | 3171a7a1e404cc82857892d0c212824cf74ce2df (diff) |
small opt for initing tnn:clip_t
-rw-r--r-- | nerv/examples/lmptb/tnn/tnn.lua | 3 |
1 file changed, 3 insertions, 0 deletions
diff --git a/nerv/examples/lmptb/tnn/tnn.lua b/nerv/examples/lmptb/tnn/tnn.lua
index db6cdd7..5448088 100644
--- a/nerv/examples/lmptb/tnn/tnn.lua
+++ b/nerv/examples/lmptb/tnn/tnn.lua
@@ -91,6 +91,9 @@ end
 
 function TNN:__init(id, global_conf, layer_conf)
     self.clip_t = layer_conf.clip_t
+    if self.clip_t == nil then
+        self.clip_t = 0
+    end
     if self.clip_t > 0 then
         nerv.info("tnn(%s) will clip gradient across time with %f...", id, self.clip_t)
     end
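Why the guard matters (context added here, not part of the commit): in Lua a missing table key reads as `nil`, and evaluating `nil > 0` raises "attempt to compare nil with number", so before this patch the constructor would error whenever `layer_conf` omitted `clip_t`. Below is a minimal standalone sketch of the same pattern; the `init_clip_t` helper and the sample configs are hypothetical stand-ins, only the guard itself mirrors the patch:

```lua
-- Sketch of the nil-default guard this commit adds.
-- init_clip_t is a hypothetical stand-in for the first lines of TNN:__init.
local function init_clip_t(layer_conf)
    local clip_t = layer_conf.clip_t
    if clip_t == nil then
        clip_t = 0 -- default: no gradient clipping across time
    end
    -- Without the guard above, this comparison errors when clip_t is nil:
    -- "attempt to compare nil with number"
    if clip_t > 0 then
        print(("will clip gradient across time with %f..."):format(clip_t))
    end
    return clip_t
end

print(init_clip_t({}))            --> 0 (clip_t omitted, clipping disabled)
print(init_clip_t({clip_t = 5}))  --> prints the clip notice, then 5
```

An equivalent one-liner would be `self.clip_t = layer_conf.clip_t or 0`; since `clip_t` is numeric here, the usual caveat of the `or` idiom (it also replaces an explicit `false`) does not apply.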