author    Determinant <[email protected]>  2015-05-26 23:58:32 +0800
committer Determinant <[email protected]>  2015-05-26 23:58:32 +0800
commit    f8543464c13584e39bfacee694ee1ed80ac121f4 (patch)
tree      3e29ffd5205659fbf3f908b5406522e4bab1c2e9
parent    910640c0ef7c43d586180241f79723973e0e7d35 (diff)
fix a severe bug in memory management of userdata
-rw-r--r--  io/init.lua                 3
-rw-r--r--  io/param.c                 10
-rw-r--r--  layer/affine.lua           22
-rw-r--r--  layer/init.lua              4
-rw-r--r--  matrix/generic/cumatrix.c  35
-rw-r--r--  matrix/generic/matrix.c    10
-rw-r--r--  matrix/init.lua             2
7 files changed, 64 insertions, 22 deletions
diff --git a/io/init.lua b/io/init.lua
index d96f062..dc1c6c3 100644
--- a/io/init.lua
+++ b/io/init.lua
@@ -22,7 +22,8 @@ function nerv.ParamFile:read_param(id)
if metadata == nil then
nerv_error("param with id %s does not exist", id)
end
- local param = assert(loadstring("return " .. metadata.type .. "(" .. id .. ")"))()
+ local param = assert(loadstring("return " ..
+ metadata.type .. "(\"" .. id .. "\")"))()
param:set_info(metadata.info)
param:read(self:get_chunkdata(id))
return param
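
Note: the change above quotes the chunk id so that it reaches the constructor as a string literal instead of being compiled as a (nil) global inside the loadstring chunk. A minimal sketch of the difference; the id and type values here are illustrative, not taken from a real param file:

    local id, ty = "affine0", "nerv.LinearTransParam"
    -- before the fix: id becomes a global reference inside the chunk,
    -- which evaluates to nil when the chunk runs
    local bad  = "return " .. ty .. "(" .. id .. ")"
    -- after the fix: id is embedded as a string literal
    local good = "return " .. ty .. "(\"" .. id .. "\")"
    print(bad)   --> return nerv.LinearTransParam(affine0)
    print(good)  --> return nerv.LinearTransParam("affine0")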
diff --git a/io/param.c b/io/param.c
index 477df28..a57b183 100644
--- a/io/param.c
+++ b/io/param.c
@@ -77,7 +77,8 @@ ParamChunkData *get_param_chunk_data(FILE *fp, ParamChunkInfo *info) {
pcd->data = (char *)malloc(info->length);
pcd->fp = fmemopen(pcd->data, info->length, "r");
assert(fseeko(fp, info->offset, SEEK_SET) == 0);
- assert(fread(pcd->data, 1, info->length, fp) == (size_t)info->length);
+ if (fread(pcd->data, 1, info->length, fp) != (size_t)info->length)
+ return NULL;
return pcd;
}
@@ -239,6 +240,7 @@ int nerv_param_file_write_chunkdata(lua_State *L) {
int nerv_param_file_get_chunkdata(lua_State *L) {
ParamFileHandle *pfh;
ParamChunkInfo *pci;
+ ParamChunkData *pcd;
const char *id = luaL_checkstring(L, 2);
lua_getfield(L, 1, "handle");
@@ -252,9 +254,9 @@ int nerv_param_file_get_chunkdata(lua_State *L) {
return 0;
lua_getfield(L, -1, "chunk");
pci = luaT_checkudata(L, -1, nerv_param_chunk_info_tname);
-
- luaT_pushudata(L, get_param_chunk_data(pfh->fp, pci),
- nerv_param_chunk_data_tname);
+ if (!(pcd = get_param_chunk_data(pfh->fp, pci)))
+ nerv_error(L, "unexpected end of file");
+ luaT_pushudata(L, pcd, nerv_param_chunk_data_tname);
return 1;
}
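
Note: with this change a truncated param file surfaces as a catchable Lua error ("unexpected end of file") instead of aborting the whole process through assert(). A sketch of what that looks like from the Lua side; the file name and chunk id are hypothetical, and the exact ParamFile constructor arguments are an assumption:

    local ok, err = pcall(function ()
        local pf = nerv.ParamFile("truncated.nerv", "r")  -- assumed ctor args
        return pf:read_param("affine0")                   -- hypothetical id
    end)
    if not ok then
        print(err)  -- expected to mention "unexpected end of file"
    end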
diff --git a/layer/affine.lua b/layer/affine.lua
index cd2ba0b..221aacd 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -14,23 +14,31 @@ function AffineLayer:__init(id, global_conf, ltp, bp)
self.ltp = ltp
self.bp = bp
self.gconf = global_conf
+end
+
+function AffineLayer:init()
-- linear transform correction
- self.ltc = ltp:create()
+ self.ltc = self.ltp.trans:create()
self.ltc:fill(0)
-- bias correction
- self.bc = bp:create()
+ self.bc = self.bp.trans:create()
self.bc:fill(0)
end
function nerv.AffineLayer:update(bp_err, input, output)
+ local ltp = self.ltp.trans
+ local bp = self.bp.trans
+ local ltc = self.ltc
+ local bc = self.bc
+ local gconf = self.gconf
-- momentum gain
local mmt_gain = 1.0 / (1.0 - gconf.momentum);
- local n = input.nrow() * mmt_gain
+ local n = input:nrow() * mmt_gain
-- update corrections (accumulated errors)
ltc:mul(input, bp_err, 1.0, gconf.momentum, 'T', 'N')
bc:add(bc, bp_err:colsum(), gconf.momentum, 1.0)
-- perform update
- ltp:add(lpc, ltc, 1.0, -gconf.lrate / n)
+ ltp:add(ltp, ltc, 1.0, -gconf.lrate / n)
bp:add(bp, bc, 1.0, -gconf.lrate / n)
-- weight decay
ltp:add(ltp, ltp, 1.0, -gconf.lrate * gconf.wcost)
@@ -38,11 +46,11 @@ end
function nerv.AffineLayer:propagate(input, output)
-- apply linear transform
- output:mul(input, self.ltp, 'N', 'N')
+ output:mul(input, self.ltp.trans, 1.0, 0.0, 'N', 'N')
-- add bias
- output:add_row(self.bp, 1.0)
+ output:add_row(self.bp.trans, 1.0)
end
function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
- next_bp_err:mul(bp_err, self.ltp, 'N', 'T')
+ next_bp_err:mul(bp_err, self.ltp.trans, 1.0, 0.0, 'N', 'T')
end
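
Note: the update above is momentum SGD read off the code: ltc = input^T * bp_err + momentum * ltc, then ltp = ltp - (lrate / n) * ltc with n = nrow / (1 - momentum), followed by weight decay ltp = ltp * (1 - lrate * wcost). A tiny runnable sketch of the step-size arithmetic (all values made up):

    local momentum, lrate, wcost, nrow = 0.9, 0.1, 1e-6, 256
    local mmt_gain = 1.0 / (1.0 - momentum)  -- 10.0
    local n = nrow * mmt_gain                -- 2560
    print(-lrate / n)                        -- coefficient applied to ltc
    print(1 - lrate * wcost)                 -- per-step weight-decay factor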
diff --git a/layer/init.lua b/layer/init.lua
index 6923dbd..0f0afe8 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -6,6 +6,10 @@ function nerv.Param:__init(id)
self.id = id
end
+function nerv.Param:init(id)
+ nerv.error_method_not_implemented()
+end
+
function nerv.Param:get_info()
return self.info
end
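
Note: nerv.Param:init is declared here as an abstract hook; concrete parameter classes are meant to override it, mirroring the __init/init two-phase split introduced in AffineLayer above. A hypothetical subclass sketch, assuming nerv.class is this codebase's class helper:

    local MyParam = nerv.class("nerv.MyParam", "nerv.Param")
    function MyParam:init(id)
        -- allocate whatever storage this parameter needs
    end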
diff --git a/matrix/generic/cumatrix.c b/matrix/generic/cumatrix.c
index aa303d4..8de6c1b 100644
--- a/matrix/generic/cumatrix.c
+++ b/matrix/generic/cumatrix.c
@@ -43,8 +43,7 @@ static int nerv_matrix_(add)(lua_State *L) {
if (!(a->nrow == b->nrow && a->ncol == b->ncol))
nerv_error(L, "Matrices should be of the same dimension");
nerv_matrix_(add_)(a, b, c, alpha, beta);
- luaT_pushudata(L, c, nerv_matrix_(tname));
- return 1;
+ return 0;
}
static int nerv_matrix_(get_cublas_op)(char ch) {
@@ -52,6 +51,9 @@ static int nerv_matrix_(get_cublas_op)(char ch) {
}
static int nerv_matrix_(mul)(lua_State *L) {
+#define SWAP(a, b) \
+ do { int t = (a); (a) = (b); (b) = t; } while (0)
+
Matrix *c = luaT_checkudata(L, 1, nerv_matrix_(tname));
Matrix *a = luaT_checkudata(L, 2, nerv_matrix_(tname));
Matrix *b = luaT_checkudata(L, 3, nerv_matrix_(tname));
@@ -62,23 +64,26 @@ static int nerv_matrix_(mul)(lua_State *L) {
: CUBLAS_OP_N;
int tb = nargs > 6 ? nerv_matrix_(get_cublas_op)(*luaL_checkstring(L, 7)) \
: CUBLAS_OP_N;
- printf("%d %d\n", ta, tb);
- if (a->ncol != b->nrow)
+ int am = a->nrow, an = a->ncol;
+ int bm = b->nrow, bn = b->ncol;
+ if (ta == CUBLAS_OP_T) SWAP(am, an);
+ if (tb == CUBLAS_OP_T) SWAP(bm, bn);
+ if (an != bm)
nerv_error(L, "Wrong dimension of multipliers");
/* MATRIX_ELEM alpha = 1.0f, beta = 0.0f; */
NERV_CUBLAS_(gemm)(cublas_handle, tb, ta,
- b->ncol, a->nrow, b->nrow,
+ bn, am, bm,
&alpha,
MATRIX_ELEM_PTR(b), b->stride / sizeof(MATRIX_ELEM),
MATRIX_ELEM_PTR(a), a->stride / sizeof(MATRIX_ELEM),
&beta,
MATRIX_ELEM_PTR(c), c->stride / sizeof(MATRIX_ELEM));
- luaT_pushudata(L, c, nerv_matrix_(tname));
- return 1;
+ return 0;
}
static int nerv_matrix_(create)(lua_State *L) {
Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ fprintf(stderr, "create\n");
Matrix *b = nerv_matrix_(new_)(a->nrow, a->ncol);
luaT_pushudata(L, b, nerv_matrix_(tname));
return 1;
@@ -174,6 +179,21 @@ static int nerv_matrix_(copy_to)(lua_State *L) {
return 0;
}
+static int nerv_matrix_(trans)(lua_State *L) {
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *b = nerv_matrix_(new_)(a->ncol, a->nrow);
+ MATRIX_ELEM alpha = 1, beta = 0;
+ NERV_CUBLAS_(geam)(cublas_handle, CUBLAS_OP_T, CUBLAS_OP_T,
+ a->nrow, a->ncol,
+ &alpha,
+ MATRIX_ELEM_PTR(a), a->stride / sizeof(MATRIX_ELEM),
+ &beta,
+ MATRIX_ELEM_PTR(a), a->stride / sizeof(MATRIX_ELEM),
+ MATRIX_ELEM_PTR(b), b->stride / sizeof(MATRIX_ELEM));
+ luaT_pushudata(L, b, nerv_matrix_(tname));
+ return 1;
+}
+
static const luaL_Reg nerv_matrix_(extra_methods)[] = {
{"create", nerv_matrix_(create)},
@@ -184,6 +204,7 @@ static const luaL_Reg nerv_matrix_(extra_methods)[] = {
{"rowmax", nerv_matrix_(rowmax)},
{"copy_from", nerv_matrix_(copy_from)},
{"copy_to", nerv_matrix_(copy_to)},
+ {"trans", nerv_matrix_(trans)},
/* in-place calc */
{"add", nerv_matrix_(add)},
{"mul", nerv_matrix_(mul)},
diff --git a/matrix/generic/matrix.c b/matrix/generic/matrix.c
index c3838d2..74c9f19 100644
--- a/matrix/generic/matrix.c
+++ b/matrix/generic/matrix.c
@@ -9,8 +9,14 @@ extern const char *nerv_matrix_(tname);
extern const char *MATRIX_BASE_TNAME;
void nerv_matrix_(data_free)(Matrix *self) {
+ assert(*self->data_ref > 0);
if (--(*self->data_ref) == 0)
+ {
+ /* free matrix data */
MATRIX_DATA_FREE(MATRIX_ELEM_PTR(self));
+ free(self->data_ref);
+ free(self);
+ }
}
void nerv_matrix_(data_retain)(Matrix *self) {
@@ -40,7 +46,7 @@ int nerv_matrix_(new)(lua_State *L) {
int nerv_matrix_(destroy)(lua_State *L) {
Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
nerv_matrix_(data_free)(self);
- return 0;
+ return 1;
}
int nerv_matrix_(get_elem)(lua_State *L);
@@ -54,7 +60,7 @@ static Matrix *nerv_matrix_(getrow)(Matrix *self, int row) {
prow->nmax = prow->ncol;
MATRIX_ELEM_PTR(prow) = MATRIX_ROW_PTR(self, row);
prow->data_ref = self->data_ref;
- nerv_matrix_(data_retain)(self);
+ nerv_matrix_(data_retain)(prow);
return prow;
}
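
Note: the memory-management fix means the shared storage, and the refcount cell itself, is finally released when the last view's count drops to zero, and a row view now retains through its own struct. From Lua the effect is that a row keeps its parent's storage alive; this sketch assumes the m[i] row-indexing sugar and ncol() accessor exposed by matrix/init.lua:

    local m = nerv.CuMatrixFloat(2, 3)
    m:fill(0)
    local r = m[1]     -- row view sharing m's storage; bumps *data_ref
    m = nil
    collectgarbage()   -- the parent userdata is collected here...
    print(r:ncol())    -- ...but the storage survives until r dies too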
diff --git a/matrix/init.lua b/matrix/init.lua
index 09c9c64..a04e83a 100644
--- a/matrix/init.lua
+++ b/matrix/init.lua
@@ -35,7 +35,7 @@ end
function nerv.CuMatrix:__mul__(b)
c = self:create()
- c:mul(self, b, 0.5, 0.0, 'N', 'N')
+ c:mul(self, b, 1.0, 0.0, 'N', 'N')
return c
end
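
Note: before this fix the overloaded * operator passed a hard-coded alpha of 0.5, silently halving every product; it now computes the plain matrix product. A usage sketch (constructor name assumed as above):

    local a = nerv.CuMatrixFloat(2, 2)
    local b = nerv.CuMatrixFloat(2, 2)
    a:fill(1)
    b:fill(1)
    local c = a * b  -- every entry is now 2.0, not 1.0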