author     Yimmon Zhuang <[email protected]>    2015-08-27 19:52:21 +0800
committer  Yimmon Zhuang <[email protected]>    2015-08-27 19:52:21 +0800
commit     667d3ab6a876feac022e889c260dd7abca5199e3
tree       54cbcf0f345eb551768fdc431d72a07d705edfb9
parent     a7e57b29248247486ceb0cbfbc9b9e1362592be0
parent     8bf9c7575ffeeabb3924e9e02a35afe187071fe2
Merge remote-tracking branch 'upstream/master'
-rw-r--r--  embedding_example/setup_nerv.lua  9

1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/embedding_example/setup_nerv.lua b/embedding_example/setup_nerv.lua
index 5ade950..d80c306 100644
--- a/embedding_example/setup_nerv.lua
+++ b/embedding_example/setup_nerv.lua
@@ -11,12 +11,11 @@ local batch_size = 1
 network:init(batch_size)
 
 function propagator(input, output)
-    local transformed = nerv.speech_utils.global_transf(input,
-                            global_transf, 0, gconf) -- preprocessing
-    local gpu_input = nerv.CuMatrixFloat(transformed:nrow(), transformed:ncol())
+    local transformed = nerv.speech_utils.global_transf(
+                            gconf.cumat_type.new_from_host(input),
+                            global_transf, 0, 0, gconf) -- preprocessing
+    local gpu_input = transformed
     local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol())
-    print(transformed)
-    gpu_input:copy_fromh(transformed)
     network:propagate({gpu_input}, {gpu_output})
     gpu_output:copy_toh(output)
     -- collect garbage in-time to save GPU memory
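
For readability, here is a sketch of how the propagator function reads once this hunk is applied. It is a reconstruction from the diff, not a verbatim excerpt: the exact indentation, the closing end, and the trailing collectgarbage call (implied by the final comment but outside the hunk) are assumptions.

    function propagator(input, output)
        -- upload the host-side feature matrix to the GPU, then run the
        -- global transform on the GPU side (preprocessing)
        local transformed = nerv.speech_utils.global_transf(
                                gconf.cumat_type.new_from_host(input),
                                global_transf, 0, 0, gconf)
        local gpu_input = transformed
        local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol())
        network:propagate({gpu_input}, {gpu_output})
        gpu_output:copy_toh(output)
        -- collect garbage in-time to save GPU memory
        collectgarbage("collect") -- assumed: this call is not shown in the hunk
    end

The net effect of the change is that the input matrix is moved to the GPU up front via gconf.cumat_type.new_from_host, so the separate nerv.CuMatrixFloat allocation for gpu_input, the explicit gpu_input:copy_fromh(transformed) copy, and the debug print are no longer needed.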