diff options
author | Determinant <[email protected]> | 2015-05-22 10:26:12 +0800 |
---|---|---|
committer | Determinant <[email protected]> | 2015-05-22 10:26:12 +0800 |
commit | 8f18aaba322cf717b8805ebfcacb5a3108d3dda8 (patch) | |
tree | 366b86e2be668da0cf0a7ae430d5d161145b4c16 /matrix/generic/cumatrix.c | |
parent | 7503135d355275a17128f8a4c897739669fcd646 (diff) |
add base class CuMatrix for CuMatrixFloat and CuMatrixDouble
Diffstat (limited to 'matrix/generic/cumatrix.c')
-rw-r--r-- | matrix/generic/cumatrix.c | 38 |
1 file changed, 17 insertions, 21 deletions
diff --git a/matrix/generic/cumatrix.c b/matrix/generic/cumatrix.c index 7afa51a..90c6d6a 100644 --- a/matrix/generic/cumatrix.c +++ b/matrix/generic/cumatrix.c @@ -8,6 +8,7 @@ #define MATRIX_DATA_WRITE(data, idx, val) cuda_matrix_(write)(data, idx, val) #define MATRIX_DATA_READ(data, idx) cuda_matrix_(read)(data, idx) #define MATRIX_INIT(L) cuda_matrix_(init)(L) +#define MATRIX_BASE_TNAME nerv_matrix_cuda_tname #define NERV_GENERIC_MATRIX #define NERV_GENERIC_CUKERNEL #include "../../common.h" @@ -34,30 +35,18 @@ static void nerv_matrix_(add_)(const Matrix *a, const Matrix *b, } static int nerv_matrix_(add)(lua_State *L) { - Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); - Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname)); - Matrix *c; + Matrix *c = luaT_checkudata(L, 1, nerv_matrix_(tname)); + Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname)); + Matrix *b = luaT_checkudata(L, 3, nerv_matrix_(tname)); + MATRIX_ELEM alpha = luaL_checknumber(L, 4); /* alpha */ + MATRIX_ELEM beta = luaL_checknumber(L, 5); /* beta */ if (!(a->nrow == b->nrow && a->ncol == b->ncol)) nerv_error(L, "Matrices should be of the same dimension"); - c = nerv_matrix_(new_)(a->nrow, a->ncol); - nerv_matrix_(add_)(a, b, c, 1.0f, 1.0f); + nerv_matrix_(add_)(a, b, c, alpha, beta); luaT_pushudata(L, c, nerv_matrix_(tname)); return 1; } -static int nerv_matrix_(sub)(lua_State *L) { - Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); - Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname)); - Matrix *c; - if (!(a->nrow == b->nrow && a->ncol == b->ncol)) - nerv_error(L, "Matrices should be of the same dimension"); - c = nerv_matrix_(new_)(a->nrow, a->ncol); - nerv_matrix_(add_)(a, b, c, 1.0f, -1.0f); - luaT_pushudata(L, c, nerv_matrix_(tname)); - return 1; -} - - static int nerv_matrix_(mul)(lua_State *L) { Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname)); @@ -77,6 +66,13 @@ static int nerv_matrix_(mul)(lua_State 
*L) { return 1; } +static int nerv_matrix_(create)(lua_State *L) { + Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); + Matrix *b = nerv_matrix_(new_)(a->nrow, a->ncol); + luaT_pushudata(L, b, nerv_matrix_(tname)); + return 1; +} + static int nerv_matrix_(sigmoid)(lua_State *L) { Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname)); Matrix *b = nerv_matrix_(new_)(a->nrow, a->ncol); @@ -114,9 +110,9 @@ static int nerv_matrix_(colmax)(lua_State *L) { } static const luaL_Reg nerv_matrix_(extra_methods)[] = { - {"__add__", nerv_matrix_(add)}, - {"__sub__", nerv_matrix_(sub)}, - {"__mul__", nerv_matrix_(mul)}, + {"add", nerv_matrix_(add)}, + {"mul", nerv_matrix_(mul)}, + {"create", nerv_matrix_(create)}, {"sigmoid", nerv_matrix_(sigmoid)}, {"softmax", nerv_matrix_(softmax)}, {"colsum", nerv_matrix_(colsum)}, |