diff options
author | txh18 <[email protected]> | 2015-05-27 20:26:09 +0800 |
---|---|---|
committer | txh18 <[email protected]> | 2015-05-27 20:26:09 +0800 |
commit | ba66e109a89d5aff4d709cf0865da60d4d873fee (patch) | |
tree | b6f71baa6860bf2b64c34d84965291599b2a0f9b | |
parent | b9a860c021183d4e8138b5fa8e637414d2e47108 (diff) |
renamed copy_from/copy_to to copy_fromh/copy_toh (host transfers) and added copy_fromd/copy_tod (device-to-device transfers)
-rw-r--r-- | examples/cumatrix_from_mmatrix.lua | 13 | ||||
-rw-r--r-- | matrix/generic/cumatrix.c | 31 |
2 files changed, 36 insertions, 8 deletions
diff --git a/examples/cumatrix_from_mmatrix.lua b/examples/cumatrix_from_mmatrix.lua index fba8a90..1aac1c4 100644 --- a/examples/cumatrix_from_mmatrix.lua +++ b/examples/cumatrix_from_mmatrix.lua @@ -1,5 +1,5 @@ -m = 10 -n = 10 +m = 3 +n = 4 fm = nerv.MMatrixFloat(m, n) dm = nerv.MMatrixDouble(m, n) for i = 0, m - 1 do @@ -15,9 +15,14 @@ print(dm) fc = nerv.CuMatrixFloat(m, n) dc = nerv.CuMatrixDouble(m, n) -fc:copy_from(fm) -dc:copy_from(dm) +fc:copy_fromh(fm) +dc:copy_fromh(dm) +print("fc and dc") print(fc) print(dc) +dc[1]:copy_tod(dc[0]) +print("dc[1] copied to dc[0]") +print(dc) +print("softmax of fc and dc") print(fc:softmax()) print(dc:softmax()) diff --git a/matrix/generic/cumatrix.c b/matrix/generic/cumatrix.c index 2b3b9d4..49e2620 100644 --- a/matrix/generic/cumatrix.c +++ b/matrix/generic/cumatrix.c @@ -169,9 +169,30 @@ static int nerv_matrix_(fill)(lua_State *L) { return 0; } +static int nerv_matrix_(copy_fromd)(lua_State *L) { + Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); + Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname)); + CHECK_SAME_DIMENSION(a, b); + cudaMemcpy2D(MATRIX_ELEM_PTR(a), a->stride, + MATRIX_ELEM_PTR(b), b->stride, + sizeof(MATRIX_ELEM) * b->ncol, b->nrow, + cudaMemcpyDeviceToDevice); + return 0; +} + +static int nerv_matrix_(copy_tod)(lua_State *L) { + Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); + Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname)); + CHECK_SAME_DIMENSION(a, b); + cudaMemcpy2D(MATRIX_ELEM_PTR(b), b->stride, + MATRIX_ELEM_PTR(a), a->stride, + sizeof(MATRIX_ELEM) * a->ncol, a->nrow, + cudaMemcpyDeviceToDevice); + return 0; +} extern const char *MATRIX_CUMATRIX_HOST_TNAME; -static int nerv_matrix_(copy_from)(lua_State *L) { +static int nerv_matrix_(copy_fromh)(lua_State *L) { Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); Matrix *b = luaT_checkudata(L, 2, MATRIX_CUMATRIX_HOST_TNAME); CHECK_SAME_DIMENSION(a, b); @@ -182,7 +203,7 @@ static int nerv_matrix_(copy_from)(lua_State *L) 
{ return 0; } -static int nerv_matrix_(copy_to)(lua_State *L) { +static int nerv_matrix_(copy_toh)(lua_State *L) { Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); Matrix *b = luaT_checkudata(L, 2, MATRIX_CUMATRIX_HOST_TNAME); CHECK_SAME_DIMENSION(a, b); @@ -215,8 +236,10 @@ static const luaL_Reg nerv_matrix_(extra_methods)[] = { {"colsum", nerv_matrix_(colsum)}, {"rowsum", nerv_matrix_(rowsum)}, {"rowmax", nerv_matrix_(rowmax)}, - {"copy_from", nerv_matrix_(copy_from)}, - {"copy_to", nerv_matrix_(copy_to)}, + {"copy_fromh", nerv_matrix_(copy_fromh)}, + {"copy_fromd", nerv_matrix_(copy_fromd)}, + {"copy_toh", nerv_matrix_(copy_toh)}, + {"copy_tod", nerv_matrix_(copy_tod)}, {"trans", nerv_matrix_(trans)}, /* in-place calc */ {"add", nerv_matrix_(add)}, |