From 9791a4c03cd6ae0f7403daa8f6ba84b6c523c5a7 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Thu, 28 May 2015 12:59:30 +0800
Subject: change softmax to in-place operation and fix memory leak

---
 examples/cumatrix_example.lua      |  6 ++++--
 examples/cumatrix_from_mmatrix.lua |  8 ++++++--
 matrix/generic/cumatrix.c          | 10 ++++++----
 3 files changed, 16 insertions(+), 8 deletions(-)

diff --git a/examples/cumatrix_example.lua b/examples/cumatrix_example.lua
index f8235eb..084dcca 100644
--- a/examples/cumatrix_example.lua
+++ b/examples/cumatrix_example.lua
@@ -11,10 +11,12 @@ for i = 0, m - 1 do
     end
 end
 print(fm)
-fs = fm:softmax()
+fs = fm:create()
+fs:softmax(fm)
 -- print(fs)
 print(dm)
-ds = dm:softmax()
+ds = dm:create()
+ds:softmax(dm)
 -- print(ds)
 print(fs)
 print(fs + fs)
diff --git a/examples/cumatrix_from_mmatrix.lua b/examples/cumatrix_from_mmatrix.lua
index fba8a90..964d008 100644
--- a/examples/cumatrix_from_mmatrix.lua
+++ b/examples/cumatrix_from_mmatrix.lua
@@ -19,5 +19,9 @@ fc:copy_from(fm)
 dc:copy_from(dm)
 print(fc)
 print(dc)
-print(fc:softmax())
-print(dc:softmax())
+sfc = fc:create()
+sdc = dc:create()
+sfc:softmax(fc)
+print(sfc)
+sdc:softmax(dc)
+print(sdc)
diff --git a/matrix/generic/cumatrix.c b/matrix/generic/cumatrix.c
index f24280c..d350162 100644
--- a/matrix/generic/cumatrix.c
+++ b/matrix/generic/cumatrix.c
@@ -27,6 +27,7 @@ static cublasHandle_t cublas_handle;
 
 Matrix *nerv_matrix_(new_)(long nrow, long ncol);
+void nerv_matrix_(data_free)(Matrix *self);
 static void nerv_matrix_(add_)(const Matrix *a, const Matrix *b,
                                 const Matrix *c,
@@ -113,15 +114,16 @@ static int nerv_matrix_(sigmoid_grad)(lua_State *L) {
 }
 
 static int nerv_matrix_(softmax)(lua_State *L) {
-    Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+    Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
+    Matrix *b = luaT_checkudata(L, 1, nerv_matrix_(tname));
     Matrix *max = nerv_matrix_(new_)(a->nrow, 1);
     Matrix *dno = nerv_matrix_(new_)(a->nrow, 1);
-    Matrix *b = nerv_matrix_(new_)(a->nrow, a->ncol);
     cudak_(cuda_rowmax)(a, max);
     cudak_(cuda_softmax_denominator)(a, max, dno);
     cudak_(cuda_softmax_final)(a, max, dno, b);
-    luaT_pushudata(L, b, nerv_matrix_(tname));
-    return 1;
+    nerv_matrix_(data_free)(max);
+    nerv_matrix_(data_free)(dno);
+    return 0;
 }
 
 static int nerv_matrix_(rowsum)(lua_State *L) {
--
cgit v1.2.3
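
For reference, a minimal usage sketch of the calling convention this commit introduces, based on the updated Lua examples above: the caller pre-allocates the destination with create(), and softmax() now fills that destination in place instead of allocating and returning a new matrix (the freed max/dno temporaries fix the leak on the C side). The constructor name nerv.CuMatrixFloat and the matrix size below are assumptions for illustration, not taken from this patch.

    -- sketch of the new in-place softmax call (assumed constructor name)
    m, n = 4, 4
    fm = nerv.CuMatrixFloat(m, n)  -- source matrix; fill it as in the example above
    -- old API removed by this commit:  fs = fm:softmax()
    fs = fm:create()               -- destination with the same shape as fm
    fs:softmax(fm)                 -- row-wise softmax of fm written into fs; returns nothing
    print(fs)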