about summary refs log tree commit diff
path: root/embedding_example
diff options
context:
space:
mode:
authorDeterminant <ted.sybil@gmail.com>2015-08-27 17:38:09 +0800
committerDeterminant <ted.sybil@gmail.com>2015-08-27 17:38:09 +0800
commit8bf9c7575ffeeabb3924e9e02a35afe187071fe2 (patch)
tree54cbcf0f345eb551768fdc431d72a07d705edfb9 /embedding_example
parente97b97e4c684e7f26064bcc0a6440ac5d6cddc47 (diff)
update embedding example to use new global_transf()
Diffstat (limited to 'embedding_example')
-rw-r--r--embedding_example/setup_nerv.lua9
1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/embedding_example/setup_nerv.lua b/embedding_example/setup_nerv.lua
index 5ade950..d80c306 100644
--- a/embedding_example/setup_nerv.lua
+++ b/embedding_example/setup_nerv.lua
@@ -11,12 +11,11 @@ local batch_size = 1
network:init(batch_size)
function propagator(input, output)
- local transformed = nerv.speech_utils.global_transf(input,
- global_transf, 0, gconf) -- preprocessing
- local gpu_input = nerv.CuMatrixFloat(transformed:nrow(), transformed:ncol())
+ local transformed = nerv.speech_utils.global_transf(
+ gconf.cumat_type.new_from_host(input),
+ global_transf, 0, 0, gconf) -- preprocessing
+ local gpu_input = transformed
local gpu_output = nerv.CuMatrixFloat(output:nrow(), output:ncol())
- print(transformed)
- gpu_input:copy_fromh(transformed)
network:propagate({gpu_input}, {gpu_output})
gpu_output:copy_toh(output)
-- collect garbage in-time to save GPU memory