diff options
author | Determinant <[email protected]> | 2015-08-27 17:38:09 +0800 |
---|---|---|
committer | Determinant <[email protected]> | 2015-08-27 17:38:09 +0800 |
commit | 8bf9c7575ffeeabb3924e9e02a35afe187071fe2 (patch) | |
tree | 54cbcf0f345eb551768fdc431d72a07d705edfb9 | |
parent | e97b97e4c684e7f26064bcc0a6440ac5d6cddc47 (diff) |
update embedding example to use new global_transf()
-rw-r--r-- | embedding_example/setup_nerv.lua | 9 |
1 file changed, 4 insertions, 5 deletions
diff --git a/embedding_example/setup_nerv.lua b/embedding_example/setup_nerv.lua index 5ade950..d80c306 100644 --- a/embedding_example/setup_nerv.lua +++ b/embedding_example/setup_nerv.lua @@ -11,12 +11,11 @@ local batch_size = 1 network:init(batch_size) function propagator(input, output) - local transformed = nerv.speech_utils.global_transf(input, - global_transf, 0, gconf) -- preprocessing - local gpu_input = nerv.CuMatrixFloat(transformed:nrow(), transformed:ncol()) + local transformed = nerv.speech_utils.global_transf( + gconf.cumat_type.new_from_host(input), + global_transf, 0, 0, gconf) -- preprocessing + local gpu_input = transformed local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol()) - print(transformed) - gpu_input:copy_fromh(transformed) network:propagate({gpu_input}, {gpu_output}) gpu_output:copy_toh(output) -- collect garbage in-time to save GPU memory |