author     Ted Yin <[email protected]>  2015-06-26 00:45:13 +0800
committer  Ted Yin <[email protected]>  2015-06-26 00:45:13 +0800
commit     aae4195c3898c0da0be5aae0b80e633185e1e242 (patch)
tree       67512bdf955e2753547ebdac2e18f837ce6fa5b0
parent     555f7f2e8d0578a1228cd736a35f80ba5a891817 (diff)
parent     d5e95b665b9dc1890fb575fcc9fca5087eaeb674 (diff)
Merge pull request #34 from cloudygoose/try-matrix-index
add a temporary fix for matrix/vector indexing issue
Diffstat:
 -rw-r--r--  Makefile                          |  4
 m---------  luajit-2.0                        |  0
 m---------  luarocks                          |  0
 -rw-r--r--  nerv/Makefile                     |  4
 -rw-r--r--  nerv/layer/softmax_ce.lua         |  2
 -rw-r--r--  nerv/lib/matrix/generic/matrix.c  |  2
 -rw-r--r--  nerv/lib/matrix/init.lua          |  7
 -rw-r--r--  nerv/lib/matrix/matrix.h          |  1
 -rw-r--r--  nerv/matrix/generic/matrix.c      | 11
 -rw-r--r--  nerv/matrix/init.lua              | 25
10 files changed, 35 insertions(+), 21 deletions(-)
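The core of the fix is an explicit dim field on Matrix: dim == 2 for an ordinary matrix and dim == 1 for the row view returned by getrow. Lua-side indexing now dispatches on dim instead of nrow, so a matrix that happens to have a single row is no longer mistaken for a vector. A minimal sketch of the intended behaviour, with nerv.MMatrixFloat standing in for whatever concrete matrix class is used (the class name is an assumption, not part of this diff):

    local m = nerv.MMatrixFloat(1, 4)   -- a 1 x 4 matrix: m:dim() == 2
    m[0][3] = 1.5                        -- first index picks the row, second the column
    local r = m[0]                       -- row view via getrow: r:dim() == 1
    r[2] = 0.5                           -- a row vector is indexed by column directly
    print(m:dim(), r:dim())              -- 2   1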
diff --git a/Makefile b/Makefile
index bfcd56f..fa888c3 100644
--- a/Makefile
+++ b/Makefile
@@ -1,7 +1,7 @@
.PHONY: all clean install luajit luarocks speech
SHELL := /bin/bash
PREFIX := $(CURDIR)/install/
-all: luajit luarocks
+all: luajit luarocks install
luajit:
PREFIX=$(PREFIX) ./tools/build_luajit.sh
luarocks:
@@ -10,3 +10,5 @@ install:
cd nerv; $(PREFIX)/bin/luarocks make
speech:
cd speech/htk_io; $(PREFIX)/bin/luarocks make
+clean:
+ cd nerv && make clean
diff --git a/luajit-2.0 b/luajit-2.0
-Subproject 11106aa83374c95f88679452e997229ecedefdc
+Subproject 7f454aed82ef364245ae73a16a04b21e2245e34
diff --git a/luarocks b/luarocks
-Subproject a0315b7bc2432ea517bb90ce39df0cc8b1cd2f6
+Subproject 028b37f2376225aa69c946beb44fd7dd68f3260
diff --git a/nerv/Makefile b/nerv/Makefile
index 224cc8a..7b75522 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -36,8 +36,8 @@ LUA_LIBS := matrix/init.lua io/init.lua init.lua \
io/sgd_buffer.lua
INCLUDE := -I $(LUA_INCDIR) -DLUA_USE_APICHECK
-#CUDA_BASE := /usr/local/cuda-6.5
-CUDA_BASE := /usr/local/cuda-5.0
+CUDA_BASE := /usr/local/cuda-6.5
+#CUDA_BASE := /usr/local/cuda-5.0
CUDA_INCLUDE := -I $(CUDA_BASE)/include/
INCLUDE += $(CUDA_INCLUDE)
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
index daf891e..c78d462 100644
--- a/nerv/layer/softmax_ce.lua
+++ b/nerv/layer/softmax_ce.lua
@@ -42,7 +42,7 @@ function SoftmaxCELayer:propagate(input, output)
output[1]:copy_fromd(ce)
end
-- add total ce
- self.total_ce = self.total_ce - ce:colsum()[0]
+ self.total_ce = self.total_ce - ce:colsum()[0][0]
self.total_frames = self.total_frames + softmax:nrow()
-- TODO: add colsame for uncompressed label
if self.compressed then
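This change follows from the new indexing semantics: ce:colsum() yields a single-row matrix, which now has dim == 2, so one index returns the row view and a second index is needed to reach the scalar. Step by step:

    local sums  = ce:colsum()   -- single-row matrix, dim == 2
    local row   = sums[0]       -- its only row, dim == 1
    local total = row[0]        -- the number itself; hence ce:colsum()[0][0]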
diff --git a/nerv/lib/matrix/generic/matrix.c b/nerv/lib/matrix/generic/matrix.c
index a64759e..5dbad48 100644
--- a/nerv/lib/matrix/generic/matrix.c
+++ b/nerv/lib/matrix/generic/matrix.c
@@ -24,6 +24,7 @@ Matrix *nerv_matrix_(create)(long nrow, long ncol, Status *status) {
self->nrow = nrow;
self->ncol = ncol;
self->nmax = self->nrow * self->ncol;
+ self->dim = 2;
MATRIX_DATA_ALLOC(&MATRIX_ELEM_PTR(self), &self->stride,
sizeof(MATRIX_ELEM) * self->ncol, self->nrow,
status);
@@ -47,6 +48,7 @@ Matrix *nerv_matrix_(getrow)(Matrix *self, int row) {
Matrix *prow = (Matrix *)malloc(sizeof(Matrix));
prow->ncol = self->ncol;
prow->nrow = 1;
+ prow->dim = 1;
prow->stride = self->stride;
prow->nmax = prow->ncol;
MATRIX_ELEM_PTR(prow) = MATRIX_ROW_PTR(self, row);
diff --git a/nerv/lib/matrix/init.lua b/nerv/lib/matrix/init.lua
index 1a8925f..89f89d6 100644
--- a/nerv/lib/matrix/init.lua
+++ b/nerv/lib/matrix/init.lua
@@ -1,6 +1,7 @@
function nerv.Matrix:__tostring__()
local ncol = self:ncol()
local nrow = self:nrow()
+ local dim = self:dim()
local strt = {}
local fmt
if self.fmt then
@@ -8,7 +9,7 @@ function nerv.Matrix:__tostring__()
else
fmt = "%.8f "
end
- if nrow == 1 then
+ if dim == 1 then
for col = 0, ncol - 1 do
table.insert(strt, string.format(fmt, self[col]))
end
@@ -28,9 +29,9 @@ function nerv.Matrix:__tostring__()
end
-- gen: a function takes take indices of the matrix and return the generated
--- all entrys in the matrix will be assigned by calling gen(i, j)
+-- all entries in the matrix will be assigned by calling gen(i, j); if self is a row vector, gen(j) will be called
function nerv.Matrix:generate(gen)
- if (self:nrow() == 1) then
+ if (self:dim() == 1) then
for j = 0, self:ncol() - 1 do
self[j] = gen(j)
end
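__tostring__ now branches on dim() rather than nrow(), so a genuine 1 x n matrix prints row by row like any other matrix, and only a true row vector (dim == 1, as returned by getrow) uses the flat single-line form. Roughly, with nerv.MMatrixFloat again used as an assumed constructor name:

    local m = nerv.MMatrixFloat(1, 3)
    m:generate(function (i, j) return j end)  -- dim == 2, so gen receives (i, j)
    print(m)      -- printed as a one-row matrix, ending with the "[... 1 x 3]" size tag
    print(m[0])   -- dim == 1: the flat row-vector form of the same values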
diff --git a/nerv/lib/matrix/matrix.h b/nerv/lib/matrix/matrix.h
index cbf32c2..67a6e30 100644
--- a/nerv/lib/matrix/matrix.h
+++ b/nerv/lib/matrix/matrix.h
@@ -6,6 +6,7 @@
typedef struct Matrix {
size_t stride; /* size of a row */
long ncol, nrow, nmax; /* dimension of the matrix */
+ int dim; /* dim == 2 for a matrix, dim == 1 for row vector */
union {
float *f;
double *d;
diff --git a/nerv/matrix/generic/matrix.c b/nerv/matrix/generic/matrix.c
index ff89e54..8efe608 100644
--- a/nerv/matrix/generic/matrix.c
+++ b/nerv/matrix/generic/matrix.c
@@ -31,7 +31,7 @@ static int nerv_matrix_(lua_newindex)(lua_State *L) {
if (lua_isnumber(L, 2))
{
int idx = luaL_checkinteger(L, 2);
- if (self->nrow == 1)
+ if (self->dim == 1)
{
if (idx < 0 || idx >= self->ncol)
nerv_error(L, "index must be within range [0, %d)", self->ncol);
@@ -57,7 +57,7 @@ static int nerv_matrix_(lua_index)(lua_State *L) {
if (lua_isnumber(L, 2))
{
int idx = luaL_checkinteger(L, 2);
- if (self->nrow == 1)
+ if (self->dim == 1)
{
if (idx < 0 || idx >= self->ncol)
nerv_error(L, "index must be within range [0, %d)", self->ncol);
@@ -86,6 +86,12 @@ static int nerv_matrix_(lua_ncol)(lua_State *L) {
return 1;
}
+static int nerv_matrix_(lua_dim)(lua_State *L) {
+ Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ lua_pushinteger(L, self->dim);
+ return 1;
+}
+
static int nerv_matrix_(lua_nrow)(lua_State *L) {
Matrix *self = luaT_checkudata(L, 1, nerv_matrix_(tname));
lua_pushinteger(L, self->nrow);
@@ -103,6 +109,7 @@ static const luaL_Reg nerv_matrix_(methods)[] = {
{"set_elem", nerv_matrix_(lua_set_elem)},
{"ncol", nerv_matrix_(lua_ncol)},
{"nrow", nerv_matrix_(lua_nrow)},
+ {"dim", nerv_matrix_(lua_dim)},
{"get_dataref_value", nerv_matrix_(lua_get_dataref_value)},
{"__index__", nerv_matrix_(lua_index)},
{"__newindex__", nerv_matrix_(lua_newindex)},
diff --git a/nerv/matrix/init.lua b/nerv/matrix/init.lua
index 1a8925f..f230e9f 100644
--- a/nerv/matrix/init.lua
+++ b/nerv/matrix/init.lua
@@ -1,6 +1,7 @@
function nerv.Matrix:__tostring__()
local ncol = self:ncol()
local nrow = self:nrow()
+ local dim = self:dim()
local strt = {}
local fmt
if self.fmt then
@@ -8,12 +9,7 @@ function nerv.Matrix:__tostring__()
else
fmt = "%.8f "
end
- if nrow == 1 then
- for col = 0, ncol - 1 do
- table.insert(strt, string.format(fmt, self[col]))
- end
- table.insert(strt, "\n")
- else
+ if (dim == 2) then
for row = 0, nrow - 1 do
local rp = self[row]
for col = 0, ncol - 1 do
@@ -21,6 +17,11 @@ function nerv.Matrix:__tostring__()
end
table.insert(strt, "\n")
end
+ else
+ for col = 0, ncol - 1 do
+ table.insert(strt, string.format(fmt, self[col]))
+ end
+ table.insert(strt, "\n")
end
table.insert(strt, string.format(
"[%s %d x %d]", self.__typename, nrow, ncol))
@@ -28,19 +29,19 @@ function nerv.Matrix:__tostring__()
end
-- gen: a function takes take indices of the matrix and return the generated
--- all entrys in the matrix will be assigned by calling gen(i, j)
+-- all entries in the matrix will be assigned by calling gen(i, j); for a vector, gen(j) will be called.
function nerv.Matrix:generate(gen)
- if (self:nrow() == 1) then
- for j = 0, self:ncol() - 1 do
- self[j] = gen(j)
- end
- else
+ if (self:dim() == 2) then
for i = 0, self:nrow() - 1 do
local row = self[i]
for j = 0, self:ncol() - 1 do
row[j] = gen(i, j)
end
end
+ else
+ for j = 0, self:ncol() - 1 do
+ self[j] = gen(j)
+ end
end
end
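As the updated comment says, generate now dispatches on dim(): a dim == 2 matrix has its callback invoked as gen(i, j), while a row vector gets gen(j). For example (constructor name assumed, as above):

    local m = nerv.MMatrixFloat(4, 4)
    -- dim == 2: gen receives both indices
    m:generate(function (i, j) return i == j and 1 or 0 end)
    -- a single row (dim == 1): gen receives only the column index
    m[0]:generate(function (j) return j * j end)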