diff options
author | txh18 <cloudygooseg@gmail.com> | 2015-12-02 18:00:47 +0800 |
---|---|---|
committer | txh18 <cloudygooseg@gmail.com> | 2015-12-02 18:00:47 +0800 |
commit | 094fc872d3e62c5f0950ac1747f130e30a08bee8 (patch) | |
tree | 2bb0c9df22c5899d9af4062f16c11261f23302dd /nerv/examples/lmptb/tnn/init.lua | |
parent | 41a841f3e0992a578cf5c8f82ae44a552a6f8b2f (diff) |
added dropout_t layer
Diffstat (limited to 'nerv/examples/lmptb/tnn/init.lua')
-rw-r--r-- | nerv/examples/lmptb/tnn/init.lua | 1 |
1 file changed, 1 insertion, 0 deletions
diff --git a/nerv/examples/lmptb/tnn/init.lua b/nerv/examples/lmptb/tnn/init.lua index ddaa6b8..66ea4ed 100644 --- a/nerv/examples/lmptb/tnn/init.lua +++ b/nerv/examples/lmptb/tnn/init.lua @@ -45,6 +45,7 @@ nerv.include('sutil.lua') nerv.include('tnn.lua') nerv.include('layersT/softmax_ce_t.lua') nerv.include('layersT/lstm_t.lua') +nerv.include('layersT/dropout_t.lua') nerv.include('layers/elem_mul.lua') nerv.include('layers/gate_fff.lua') nerv.include('layer_dag_t.lua') |