author    Determinant <[email protected]>    2015-05-28 12:59:30 +0800
committer Determinant <[email protected]>    2015-05-28 12:59:30 +0800
commit    9791a4c03cd6ae0f7403daa8f6ba84b6c523c5a7 (patch)
tree      fa35de648494ec3e6be43cd3ae8a9ee12070e7f7
parent    51ee28426b992b1421ded46721ade7a57b0a1896 (diff)
change softmax to in-place operation and fix memory leak
-rw-r--r--    examples/cumatrix_example.lua          6
-rw-r--r--    examples/cumatrix_from_mmatrix.lua     8
-rw-r--r--    matrix/generic/cumatrix.c             10
3 files changed, 16 insertions, 8 deletions
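The diffs below adopt the new calling convention: softmax no longer allocates and returns a fresh matrix; the caller allocates a destination of the same shape (via create()) and passes the source matrix as the argument. A minimal usage sketch of that convention, with the nerv.CuMatrixFloat constructor name assumed from the surrounding examples:

-- sketch of the new in-place convention (constructor name assumed)
local fm = nerv.CuMatrixFloat(3, 4)   -- source matrix, filled by the caller
local fs = fm:create()                -- destination with the same dimensions
fs:softmax(fm)                        -- writes softmax(fm) into fs; returns nothing
print(fs)
-- old allocating style removed by this commit:
--   local fs = fm:softmax()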
diff --git a/examples/cumatrix_example.lua b/examples/cumatrix_example.lua
index f8235eb..084dcca 100644
--- a/examples/cumatrix_example.lua
+++ b/examples/cumatrix_example.lua
@@ -11,10 +11,12 @@ for i = 0, m - 1 do
end
end
print(fm)
-fs = fm:softmax()
+fs = fm:create()
+fs:softmax(fm)
-- print(fs)
print(dm)
-ds = dm:softmax()
+ds = dm:create()
+ds:softmax(dm)
-- print(ds)
print(fs)
print(fs + fs)
diff --git a/examples/cumatrix_from_mmatrix.lua b/examples/cumatrix_from_mmatrix.lua
index fba8a90..964d008 100644
--- a/examples/cumatrix_from_mmatrix.lua
+++ b/examples/cumatrix_from_mmatrix.lua
@@ -19,5 +19,9 @@ fc:copy_from(fm)
dc:copy_from(dm)
print(fc)
print(dc)
-print(fc:softmax())
-print(dc:softmax())
+sfc = fc:create()
+sdc = dc:create()
+sfc:softmax(fc)
+print(sfc)
+sdc:softmax(dc)
+print(sdc)
diff --git a/matrix/generic/cumatrix.c b/matrix/generic/cumatrix.c
index f24280c..d350162 100644
--- a/matrix/generic/cumatrix.c
+++ b/matrix/generic/cumatrix.c
@@ -27,6 +27,7 @@
static cublasHandle_t cublas_handle;
Matrix *nerv_matrix_(new_)(long nrow, long ncol);
+void nerv_matrix_(data_free)(Matrix *self);
static void nerv_matrix_(add_)(const Matrix *a, const Matrix *b,
const Matrix *c,
@@ -113,15 +114,16 @@ static int nerv_matrix_(sigmoid_grad)(lua_State *L) {
}
static int nerv_matrix_(softmax)(lua_State *L) {
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ Matrix *b = luaT_checkudata(L, 1, nerv_matrix_(tname));
Matrix *max = nerv_matrix_(new_)(a->nrow, 1);
Matrix *dno = nerv_matrix_(new_)(a->nrow, 1);
- Matrix *b = nerv_matrix_(new_)(a->nrow, a->ncol);
cudak_(cuda_rowmax)(a, max);
cudak_(cuda_softmax_denominator)(a, max, dno);
cudak_(cuda_softmax_final)(a, max, dno, b);
- luaT_pushudata(L, b, nerv_matrix_(tname));
- return 1;
+ nerv_matrix_(data_free)(max);
+ nerv_matrix_(data_free)(dno);
+ return 0;
}
static int nerv_matrix_(rowsum)(lua_State *L) {
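On the C side, the destination is now the object the method is invoked on (Lua stack index 1) and the source is the argument (index 2); the temporary row-max and denominator matrices are released with data_free, closing the leak, and the binding pushes no results since the output is written into the caller-supplied matrix. One consequence is that a destination buffer can be reused across calls instead of being reallocated each time. A hedged sketch assuming the same Lua API as above (input and n_batches are illustrative names):

-- reuse one destination buffer across iterations (sketch)
local out = input:create()            -- allocate the output once
for i = 1, n_batches do
    out:softmax(input)                -- no per-call allocation, no return value
    -- ... consume out ...
end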