Diffstat (limited to 'nerv/matrix/generic')
-rw-r--r--  nerv/matrix/generic/cumatrix.c  294
-rw-r--r--  nerv/matrix/generic/matrix.c    213
-rw-r--r--  nerv/matrix/generic/mmatrix.c    60
3 files changed, 308 insertions(+), 259 deletions(-)
diff --git a/nerv/matrix/generic/cumatrix.c b/nerv/matrix/generic/cumatrix.c
index edd7b0a..cb55901 100644
--- a/nerv/matrix/generic/cumatrix.c
+++ b/nerv/matrix/generic/cumatrix.c
@@ -10,58 +10,11 @@
#include "../../lib/matrix/generic/matrix.h"
#include "../../lib/matrix/generic/cumatrix.h"
-static int nerv_matrix_(lua_add)(lua_State *L) {
- Status status;
- Matrix *c = luaT_checkudata(L, 1, nerv_matrix_(tname));
- const Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
- const Matrix *b = luaT_checkudata(L, 3, nerv_matrix_(tname));
- MATRIX_ELEM alpha = luaL_checknumber(L, 4);
- MATRIX_ELEM beta = luaL_checknumber(L, 5);
- nerv_matrix_(add)(c, a, b, alpha, beta, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_get_cublas_op)(char ch) {
+#define BLAS_OP_N CUBLAS_OP_N
+static int nerv_matrix_(lua_get_blas_op)(char ch) {
return (ch == 'T' || ch == 't') ? CUBLAS_OP_T : CUBLAS_OP_N;
}
-static int nerv_matrix_(lua_mul)(lua_State *L) {
- Status status;
- Matrix *c = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
- Matrix *b = luaT_checkudata(L, 3, nerv_matrix_(tname));
- MATRIX_ELEM alpha = luaL_checknumber(L, 4);
- MATRIX_ELEM beta = luaL_checknumber(L, 5);
- int nargs = lua_gettop(L);
- int ta = nargs > 5 ? nerv_matrix_(lua_get_cublas_op)(*luaL_checkstring(L, 6)) \
- : CUBLAS_OP_N;
- int tb = nargs > 6 ? nerv_matrix_(lua_get_cublas_op)(*luaL_checkstring(L, 7)) \
- : CUBLAS_OP_N;
- nerv_matrix_(mul)(c, a, b, alpha, beta, ta, tb, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_sigmoid)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
- nerv_matrix_(sigmoid)(a, b, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_sigmoid_grad)(lua_State *L) {
- Status status;
- Matrix *nerr = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *err = luaT_checkudata(L, 2, nerv_matrix_(tname));
- Matrix *output = luaT_checkudata(L, 3, nerv_matrix_(tname));
- nerv_matrix_(sigmoid_grad)(nerr, err, output, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
static int nerv_matrix_(lua_thres_mask)(lua_State *L) {
Status status;
Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
@@ -101,107 +54,6 @@ static int nerv_matrix_(lua_tanh_grad)(lua_State *L) {
return 0;
}
-static int nerv_matrix_(lua_softmax)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
- Matrix *b = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *max_idx = nerv_matrix_(softmax)(b, a, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- luaT_pushudata(L, max_idx, nerv_matrix_(tname));
- return 1;
-}
-
-static int nerv_matrix_(lua_rowsum)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *b = nerv_matrix_(rowsum)(a, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- luaT_pushudata(L, b, nerv_matrix_(tname));
- return 1;
-}
-
-static int nerv_matrix_(lua_colsum)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *b = nerv_matrix_(colsum)(a, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- luaT_pushudata(L, b, nerv_matrix_(tname));
- return 1;
-}
-
-static int nerv_matrix_(lua_colsame)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- const Matrix *ref = luaT_checkudata(L, 2, nerv_matrix_(tname));
- Matrix *b = nerv_matrix_(colsame)(a, ref, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- luaT_pushudata(L, b, nerv_matrix_(tname));
- return 1;
-}
-
-static int nerv_matrix_(lua_rowmax)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *b = nerv_matrix_(rowmax)(a, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- luaT_pushudata(L, b, nerv_matrix_(tname));
- return 1;
-}
-
-static int nerv_matrix_(lua_rowmax_idx)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *b;
- Matrix *idx;
- nerv_matrix_(rowmax_idx)(a, &b, &idx, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- luaT_pushudata(L, b, nerv_matrix_(tname));
- luaT_pushudata(L, idx, nerv_matrix_(tname));
- return 2;
-}
-
-static int nerv_matrix_(lua_add_row)(lua_State *L) {
- Status status;
- const Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
- Matrix *b = luaT_checkudata(L, 1, nerv_matrix_(tname));
- double beta = luaL_checknumber(L, 3);
- nerv_matrix_(add_row)(b, a, beta, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_fill)(lua_State *L) {
- Status status;
- Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
- double val = luaL_checknumber(L, 2);
- nerv_matrix_(fill)(self, val, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_clip)(lua_State *L) {
- Status status;
- Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
- double val_1 = luaL_checknumber(L, 2);
- double val_2 = luaL_checknumber(L, 3);
- nerv_matrix_(clip)(self, val_1, val_2, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_copy_fromd)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
- int nargs = lua_gettop(L);
- int b_begin = nargs > 2 ? luaL_checkinteger(L, 3) : 0;
- int b_end = nargs > 3 ? luaL_checkinteger(L, 4) : b->nrow;
- int a_begin = nargs > 4 ? luaL_checkinteger(L, 5) : 0;
- nerv_matrix_(copy_fromd)(a, b, a_begin, b_begin, b_end, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
extern const char *MATRIX_CUMATRIX_HOST_TNAME;
static int nerv_matrix_(lua_copy_fromh)(lua_State *L) {
Status status;
@@ -229,44 +81,19 @@ static int nerv_matrix_(lua_copy_toh)(lua_State *L) {
return 0;
}
-static int nerv_matrix_(lua_trans)(lua_State *L) {
+static int nerv_matrix_(lua_copy_fromd)(lua_State *L) {
Status status;
Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *b = nerv_matrix_(trans)(a, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- luaT_pushudata(L, b, nerv_matrix_(tname));
- return 1;
-}
-
-static int nerv_matrix_(lua_mul_elem)(lua_State *L) {
- Status status;
- const Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
- const Matrix *b = luaT_checkudata(L, 3, nerv_matrix_(tname));
- Matrix *c = luaT_checkudata(L, 1, nerv_matrix_(tname));
- nerv_matrix_(mul_elem)(c, a, b, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_log_elem)(lua_State *L) {
- Status status;
- const Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
- Matrix *b = luaT_checkudata(L, 1, nerv_matrix_(tname));
- nerv_matrix_(log_elem)(b, a, &status);
+ const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ int nargs = lua_gettop(L);
+ int b_begin = nargs > 2 ? luaL_checkinteger(L, 3) : 0;
+ int b_end = nargs > 3 ? luaL_checkinteger(L, 4) : b->nrow;
+ int a_begin = nargs > 4 ? luaL_checkinteger(L, 5) : 0;
+ nerv_matrix_(copy_fromd)(a, b, a_begin, b_begin, b_end, &status);
NERV_LUA_CHECK_STATUS(L, status);
return 0;
}
-static int nerv_matrix_(lua_decompress)(lua_State *L) {
- Status status;
- const Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- int orig_col = luaL_checkinteger(L, 2);
- Matrix *b = nerv_matrix_(decompress)(a, orig_col, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- luaT_pushudata(L, b, nerv_matrix_(tname));
- return 1;
-}
-
extern const char *nerv_matrix_host_float_tname;
static int nerv_matrix_(lua_copy_rows_fromh_by_idx)(lua_State *L) {
Status status;
@@ -304,44 +131,6 @@ static int nerv_matrix_(lua_copy_rows_fromd_by_colidx)(lua_State *L) {
return 0;
}
-static int nerv_matrix_(lua_expand_frm)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
- int context = luaL_checkinteger(L, 3);
- nerv_matrix_(expand_frm)(a, b, context, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_rearrange_frm)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
- int step = luaL_checkinteger(L, 3);
- nerv_matrix_(rearrange_frm)(a, b, step, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_scale_rows_by_col)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
- nerv_matrix_(scale_rows_by_col)(a, b, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_scale_rows_by_row)(lua_State *L) {
- Status status;
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
- nerv_matrix_(scale_rows_by_row)(a, b, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
#ifdef __NERV_FUTURE_CUDA_7
static int nerv_matrix_(lua_update_select_rows_by_rowidx)(lua_State *L) {
/* update c's select rows,
@@ -372,6 +161,39 @@ static int nerv_matrix_(lua_update_select_rows_by_colidx)(lua_State *L) {
}
#endif
+int nerv_matrix_(lua_get_elem)(lua_State *L) {
+ return nerv_error_method_not_implemented(L);
+}
+
+int nerv_matrix_(lua_set_elem)(lua_State *L) {
+ return nerv_error_method_not_implemented(L);
+}
+
+static MATRIX_ELEM cuda_matrix_(read)(lua_State *L, MATRIX_ELEM *data,
+ int idx) {
+ cudaError_t err;
+ MATRIX_ELEM res;
+ err = cudaMemcpy(&res, data + idx,
+ sizeof(MATRIX_ELEM), cudaMemcpyDeviceToHost);
+ if (err != cudaSuccess)
+ nerv_error(L, "cuda error: error while reading element");
+ cudaDeviceSynchronize();
+ return res;
+}
+
+static void cuda_matrix_(write)(lua_State *L, MATRIX_ELEM *data,
+ int idx, MATRIX_ELEM val) {
+ cudaError_t err;
+ err = cudaMemcpy(data + idx, &val,
+ sizeof(MATRIX_ELEM), cudaMemcpyHostToDevice);
+ if (err != cudaSuccess)
+ nerv_error(L, "cuda error: error while writing element");
+ cudaDeviceSynchronize();
+}
+
+static void cuda_matrix_(init)(lua_State *L);
+#include "matrix.c"
+
static const luaL_Reg nerv_matrix_(extra_methods)[] = {
{"colsum", nerv_matrix_(lua_colsum)},
{"colsame", nerv_matrix_(lua_colsame)},
@@ -402,6 +224,7 @@ static const luaL_Reg nerv_matrix_(extra_methods)[] = {
{"thres_mask", nerv_matrix_(lua_thres_mask)},
{"copy_rows_fromh_by_idx", nerv_matrix_(lua_copy_rows_fromh_by_idx)},
{"copy_rows_fromd_by_idx", nerv_matrix_(lua_copy_rows_fromd_by_idx)},
+ {"copy_rows_from_by_idx", nerv_matrix_(lua_copy_rows_fromd_by_idx)},
{"copy_rows_fromd_by_colidx", nerv_matrix_(lua_copy_rows_fromd_by_colidx)},
{"expand_frm", nerv_matrix_(lua_expand_frm)},
{"rearrange_frm", nerv_matrix_(lua_rearrange_frm)},
@@ -421,35 +244,4 @@ static void cuda_matrix_(init)(lua_State *L) {
#endif
}
-int nerv_matrix_(lua_get_elem)(lua_State *L) {
- return nerv_error_method_not_implemented(L);
-}
-
-int nerv_matrix_(lua_set_elem)(lua_State *L) {
- return nerv_error_method_not_implemented(L);
-}
-
-static MATRIX_ELEM cuda_matrix_(read)(lua_State *L, MATRIX_ELEM *data,
- int idx) {
- cudaError_t err;
- MATRIX_ELEM res;
- err = cudaMemcpy(&res, data + idx,
- sizeof(MATRIX_ELEM), cudaMemcpyDeviceToHost);
- if (err != cudaSuccess)
- nerv_error(L, "cuda error: error while reading element");
- cudaDeviceSynchronize();
- return res;
-}
-
-static void cuda_matrix_(write)(lua_State *L, MATRIX_ELEM *data,
- int idx, MATRIX_ELEM val) {
- cudaError_t err;
- err = cudaMemcpy(data + idx, &val,
- sizeof(MATRIX_ELEM), cudaMemcpyHostToDevice);
- if (err != cudaSuccess)
- nerv_error(L, "cuda error: error while writing element");
- cudaDeviceSynchronize();
-}
-
-#include "matrix.c"
#endif
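
The removals above rely on a configure-then-include layout: each backend file defines BLAS_OP_N and nerv_matrix_(lua_get_blas_op) for its own BLAS flavour and then includes the shared generic/matrix.c, so the wrappers this patch moves there compile against either the cuBLAS or the CBLAS transpose constants. A minimal sketch of that pattern, with placeholder names (body.c, get_blas_op, demo_mul_op) rather than the actual nerv macros:

    /* backend file, one translation unit (cf. generic/cumatrix.c above) */
    #include <cublas_v2.h>
    #define BLAS_OP_N CUBLAS_OP_N
    static int get_blas_op(char ch) {
        /* map the Lua-side 'N'/'T' flag to this backend's transpose constant */
        return (ch == 'T' || ch == 't') ? CUBLAS_OP_T : CUBLAS_OP_N;
    }
    #include "body.c"   /* stands in for generic/matrix.c */

    /* body.c: shared code compiled once per backend; it only ever names
     * BLAS_OP_N and get_blas_op, so the same text also builds against
     * CblasNoTrans/CblasTrans when included from a host backend that
     * defines them via <cblas.h> instead */
    static int demo_mul_op(int nargs, const char *flag) {
        /* flag is only read when the optional argument was actually passed */
        return nargs > 5 ? get_blas_op(*flag) : BLAS_OP_N;
    }
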
diff --git a/nerv/matrix/generic/matrix.c b/nerv/matrix/generic/matrix.c
index 8efe608..c1da774 100644
--- a/nerv/matrix/generic/matrix.c
+++ b/nerv/matrix/generic/matrix.c
@@ -125,4 +125,217 @@ void nerv_matrix_(lua_init)(lua_State *L) {
#endif
lua_pop(L, 1);
}
+
+static int nerv_matrix_(lua_add)(lua_State *L) {
+ Status status;
+ Matrix *c = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ const Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ const Matrix *b = luaT_checkudata(L, 3, nerv_matrix_(tname));
+ MATRIX_ELEM alpha = luaL_checknumber(L, 4);
+ MATRIX_ELEM beta = luaL_checknumber(L, 5);
+ nerv_matrix_(add)(c, a, b, alpha, beta, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_mul)(lua_State *L) {
+ Status status;
+ Matrix *c = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ Matrix *b = luaT_checkudata(L, 3, nerv_matrix_(tname));
+ MATRIX_ELEM alpha = luaL_checknumber(L, 4);
+ MATRIX_ELEM beta = luaL_checknumber(L, 5);
+ int nargs = lua_gettop(L);
+ int ta = nargs > 5 ? nerv_matrix_(lua_get_blas_op)(*luaL_checkstring(L, 6)) \
+ : BLAS_OP_N;
+ int tb = nargs > 6 ? nerv_matrix_(lua_get_blas_op)(*luaL_checkstring(L, 7)) \
+ : BLAS_OP_N;
+ nerv_matrix_(mul)(c, a, b, alpha, beta, ta, tb, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_sigmoid)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ nerv_matrix_(sigmoid)(a, b, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_sigmoid_grad)(lua_State *L) {
+ Status status;
+ Matrix *nerr = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *err = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ Matrix *output = luaT_checkudata(L, 3, nerv_matrix_(tname));
+ nerv_matrix_(sigmoid_grad)(nerr, err, output, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_softmax)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ Matrix *b = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *max_idx = nerv_matrix_(softmax)(b, a, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ luaT_pushudata(L, max_idx, nerv_matrix_(tname));
+ return 1;
+}
+
+static int nerv_matrix_(lua_rowsum)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *b = nerv_matrix_(rowsum)(a, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ luaT_pushudata(L, b, nerv_matrix_(tname));
+ return 1;
+}
+
+static int nerv_matrix_(lua_colsum)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *b = nerv_matrix_(colsum)(a, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ luaT_pushudata(L, b, nerv_matrix_(tname));
+ return 1;
+}
+
+static int nerv_matrix_(lua_colsame)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ const Matrix *ref = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ Matrix *b = nerv_matrix_(colsame)(a, ref, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ luaT_pushudata(L, b, nerv_matrix_(tname));
+ return 1;
+}
+
+static int nerv_matrix_(lua_rowmax)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *b = nerv_matrix_(rowmax)(a, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ luaT_pushudata(L, b, nerv_matrix_(tname));
+ return 1;
+}
+
+static int nerv_matrix_(lua_rowmax_idx)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *b;
+ Matrix *idx;
+ nerv_matrix_(rowmax_idx)(a, &b, &idx, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ luaT_pushudata(L, b, nerv_matrix_(tname));
+ luaT_pushudata(L, idx, nerv_matrix_(tname));
+ return 2;
+}
+
+static int nerv_matrix_(lua_add_row)(lua_State *L) {
+ Status status;
+ const Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ Matrix *b = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ double beta = luaL_checknumber(L, 3);
+ nerv_matrix_(add_row)(b, a, beta, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_fill)(lua_State *L) {
+ Status status;
+ Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ double val = luaL_checknumber(L, 2);
+ nerv_matrix_(fill)(self, val, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_clip)(lua_State *L) {
+ Status status;
+ Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ double val_1 = luaL_checknumber(L, 2);
+ double val_2 = luaL_checknumber(L, 3);
+ nerv_matrix_(clip)(self, val_1, val_2, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_trans)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *b = nerv_matrix_(trans)(a, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ luaT_pushudata(L, b, nerv_matrix_(tname));
+ return 1;
+}
+
+static int nerv_matrix_(lua_mul_elem)(lua_State *L) {
+ Status status;
+ const Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ const Matrix *b = luaT_checkudata(L, 3, nerv_matrix_(tname));
+ Matrix *c = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ nerv_matrix_(mul_elem)(c, a, b, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_log_elem)(lua_State *L) {
+ Status status;
+ const Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ Matrix *b = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ nerv_matrix_(log_elem)(b, a, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_decompress)(lua_State *L) {
+ Status status;
+ const Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ int orig_col = luaL_checkinteger(L, 2);
+ Matrix *b = nerv_matrix_(decompress)(a, orig_col, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ luaT_pushudata(L, b, nerv_matrix_(tname));
+ return 1;
+}
+
+static int nerv_matrix_(lua_expand_frm)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ int context = luaL_checkinteger(L, 3);
+ nerv_matrix_(expand_frm)(a, b, context, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_rearrange_frm)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ int step = luaL_checkinteger(L, 3);
+ nerv_matrix_(rearrange_frm)(a, b, step, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_scale_rows_by_col)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ nerv_matrix_(scale_rows_by_col)(a, b, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_scale_rows_by_row)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ nerv_matrix_(scale_rows_by_row)(a, b, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
#endif
diff --git a/nerv/matrix/generic/mmatrix.c b/nerv/matrix/generic/mmatrix.c
index a4e8489..93562d0 100644
--- a/nerv/matrix/generic/mmatrix.c
+++ b/nerv/matrix/generic/mmatrix.c
@@ -9,9 +9,15 @@
#include "../../lib/common.h"
#include "../../lib/matrix/generic/mmatrix.h"
#include "../../io/chunk_file.h"
-#include "string.h"
+#include <string.h>
+#include <cblas.h>
-int nerv_matrix_(lua_get_elem)(lua_State *L) {
+#define BLAS_OP_N CblasNoTrans
+static int nerv_matrix_(lua_get_blas_op)(char ch) {
+ return (ch == 'T' || ch == 't') ? CblasTrans : CblasNoTrans;
+}
+
+static int nerv_matrix_(lua_get_elem)(lua_State *L) {
Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
int idx = luaL_checkinteger(L, 2);
if (idx < 0 || idx >= self->nmax)
@@ -20,7 +26,7 @@ int nerv_matrix_(lua_get_elem)(lua_State *L) {
return 1;
}
-int nerv_matrix_(lua_set_elem)(lua_State *L) {
+static int nerv_matrix_(lua_set_elem)(lua_State *L) {
Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
int idx = luaL_checkinteger(L, 2);
MATRIX_ELEM v = luaL_checknumber(L, 3);
@@ -40,7 +46,7 @@ static void host_matrix_(init)(lua_State *L) {
#include "matrix.c"
-int nerv_matrix_(lua_load)(lua_State *L) {
+static int nerv_matrix_(lua_load)(lua_State *L) {
Status status;
ChunkData *cdp = luaT_checkudata(L, 1, nerv_chunk_data_tname);
Matrix *self = nerv_matrix_(load)(cdp, &status);
@@ -49,7 +55,7 @@ int nerv_matrix_(lua_load)(lua_State *L) {
return 1;
}
-int nerv_matrix_(lua_save)(lua_State *L) {
+static int nerv_matrix_(lua_save)(lua_State *L) {
Status status;
ChunkFile *cfp = luaT_checkudata(L, 2,
nerv_chunk_file_handle_tname);
@@ -59,7 +65,7 @@ int nerv_matrix_(lua_save)(lua_State *L) {
return 0;
}
-int nerv_matrix_(lua_copy_from)(lua_State *L) {
+static int nerv_matrix_(lua_copy_fromh)(lua_State *L) {
Status status;
Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
@@ -67,15 +73,53 @@ int nerv_matrix_(lua_copy_from)(lua_State *L) {
int b_begin = nargs > 2 ? luaL_checkinteger(L, 3) : 0;
int b_end = nargs > 3 ? luaL_checkinteger(L, 4) : b->nrow;
int a_begin = nargs > 4 ? luaL_checkinteger(L, 5) : 0;
- nerv_matrix_(copy_from)(a, b, a_begin, b_begin, b_end, &status);
+ nerv_matrix_(copy_fromh)(a, b, a_begin, b_begin, b_end, &status);
NERV_LUA_CHECK_STATUS(L, status);
return 0;
}
+static int nerv_matrix_(lua_copy_rows_fromh_by_idx)(lua_State *L) {
+ Status status;
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ const Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ const Matrix *idx = luaT_checkudata(L, 3, nerv_matrix_(tname));
+ int b_begin = lua_gettop(L) > 3 ? luaL_checkinteger(L, 4) : 0;
+ nerv_matrix_(copy_rows_fromh_by_idx)(a, b, idx, b_begin, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
static const luaL_Reg nerv_matrix_(extra_methods)[] = {
+ {"colsum", nerv_matrix_(lua_colsum)},
+ {"colsame", nerv_matrix_(lua_colsame)},
+ {"rowsum", nerv_matrix_(lua_rowsum)},
+ {"rowmax", nerv_matrix_(lua_rowmax)},
+ {"rowmax_idx", nerv_matrix_(lua_rowmax_idx)},
+ {"trans", nerv_matrix_(lua_trans)},
+ {"decompress", nerv_matrix_(lua_decompress)},
+ /* in-place calc */
+ {"copy_fromh", nerv_matrix_(lua_copy_fromh)},
+ /* alias for copy_from */
+ {"copy_from", nerv_matrix_(lua_copy_fromh)},
+ {"add", nerv_matrix_(lua_add)},
+ {"mul", nerv_matrix_(lua_mul)},
+ {"add_row", nerv_matrix_(lua_add_row)},
+ {"clip", nerv_matrix_(lua_clip)},
+ {"fill", nerv_matrix_(lua_fill)},
+ {"sigmoid", nerv_matrix_(lua_sigmoid)},
+ {"sigmoid_grad", nerv_matrix_(lua_sigmoid_grad)},
+ {"softmax", nerv_matrix_(lua_softmax)},
+ {"mul_elem", nerv_matrix_(lua_mul_elem)},
+ {"log_elem", nerv_matrix_(lua_log_elem)},
+ {"copy_rows_fromh_by_idx", nerv_matrix_(lua_copy_rows_fromh_by_idx)},
+ {"copy_rows_from_by_idx", nerv_matrix_(lua_copy_rows_fromh_by_idx)},
+ {"expand_frm", nerv_matrix_(lua_expand_frm)},
+ {"rearrange_frm", nerv_matrix_(lua_rearrange_frm)},
+ {"scale_rows_by_row", nerv_matrix_(lua_scale_rows_by_row)},
+ {"scale_rows_by_col", nerv_matrix_(lua_scale_rows_by_col)},
{"load", nerv_matrix_(lua_load)},
{"save", nerv_matrix_(lua_save)},
- {"copy_from", nerv_matrix_(lua_copy_from)},
{NULL, NULL}
};
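
The extra_methods tables in both backends are plain luaL_Reg arrays; the backend init code (outside this diff) installs them on the matrix metatype so the entries become methods callable from Lua. A generic Lua 5.1 sketch of that registration step, using made-up demo_* names rather than nerv's luaT-based loader:

    #include <lua.h>
    #include <lauxlib.h>

    static int demo_fill(lua_State *L) { (void)L; return 0; }   /* placeholder method */

    static const luaL_Reg demo_methods[] = {
        {"fill", demo_fill},
        {NULL, NULL}
    };

    /* attach the method table to a userdata metatable (plain Lua 5.1 API) */
    static void demo_register(lua_State *L, const char *tname) {
        luaL_newmetatable(L, tname);          /* create/fetch registry[tname] */
        lua_pushvalue(L, -1);
        lua_setfield(L, -2, "__index");       /* mt.__index = mt, so m:fill() resolves */
        luaL_register(L, NULL, demo_methods); /* copy entries into the metatable */
        lua_pop(L, 1);
    }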