author     txh18 <[email protected]>   2015-11-08 18:02:08 +0800
committer  txh18 <[email protected]>   2015-11-08 18:02:08 +0800
commit     d9803ab152a0152f39ce2efa55004e98a9b6aa2d (patch)
tree       07fbce8ae94ab457c7bd86d8b3f1799248c66d75
parent     a8d369d49933ffdd306f47db6b74e0d27deda5d0 (diff)
ih[0] no longer zero
-rw-r--r--   nerv/examples/lmptb/m-tests/tnn_test.lua   1
1 file changed, 0 insertions(+), 1 deletion(-)
diff --git a/nerv/examples/lmptb/m-tests/tnn_test.lua b/nerv/examples/lmptb/m-tests/tnn_test.lua
index 888ba0f..a778dea 100644
--- a/nerv/examples/lmptb/m-tests/tnn_test.lua
+++ b/nerv/examples/lmptb/m-tests/tnn_test.lua
@@ -19,7 +19,6 @@ function prepare_parameters(global_conf, first_time)
     ltp_ih = nerv.LinearTransParam("ltp_ih", global_conf)
     ltp_ih.trans = global_conf.cumat_type(global_conf.vocab:size(), global_conf.hidden_size) --index 0 is for zero, others correspond to vocab index(starting from 1)
     ltp_ih.trans:generate(global_conf.param_random)
-    ltp_ih.trans[0]:fill(0)
 
     ltp_hh = nerv.LinearTransParam("ltp_hh", global_conf)
     ltp_hh.trans = global_conf.cumat_type(global_conf.hidden_size, global_conf.hidden_size)
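For context, the sketch below is not part of the commit; it only rearranges calls already visible in the hunk above and assumes `global_conf` supplies `cumat_type`, `vocab`, `hidden_size`, and `param_random` exactly as the diff shows. It summarizes the input-to-hidden parameter setup after the change: every row of `ltp_ih.trans`, including row 0 that the in-code comment reserves for the zero word, now keeps its random initialization instead of being cleared.

```lua
-- Sketch of the ltp_ih setup in prepare_parameters() after this commit.
-- All calls are taken from the hunk above; nothing outside the diff is assumed to exist.
ltp_ih = nerv.LinearTransParam("ltp_ih", global_conf)
-- Rows cover the vocabulary (row 0 reserved for the "zero" word), columns are the hidden size.
ltp_ih.trans = global_conf.cumat_type(global_conf.vocab:size(), global_conf.hidden_size)
ltp_ih.trans:generate(global_conf.param_random)  -- random values in every row, row 0 included
-- Removed by this commit: the explicit zeroing of row 0.
-- ltp_ih.trans[0]:fill(0)
```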