about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Determinant <ted.sybil@gmail.com>    2016-04-18 12:38:25 +0800
committer Determinant <ted.sybil@gmail.com>    2016-04-18 12:38:25 +0800
commit    c73636ba680cdb5b57602a1876a75c110b43d426 (patch)
tree      25a36588f066bfb850c5d0a81c5c40f01b8def56
parent    b15dbc05b08008327b88a62d7f90ef9250182e9b (diff)
support SeqBuffer decoding; fix bugs in finding params (tag: alpha-4)
-rw-r--r--  nerv/layer/init.lua             |  9
-rw-r--r--  nerv/layer/lstm_gate.lua        |  4
-rw-r--r--  nerv/lib/matrix/generic/mmatrix.c | 48
-rw-r--r--  nerv/matrix/generic/cumatrix.c  | 23
-rw-r--r--  nerv/matrix/generic/matrix.c    | 23
-rw-r--r--  nerv/matrix/generic/mmatrix.c   |  2
-rw-r--r--  nerv/test/cumatrix_func.out     | 13
-rw-r--r--  nerv/test/matrix_func.lua       |  4
-rw-r--r--  nerv/test/mmatrix_func.out      | 13
9 files changed, 108 insertions, 31 deletions
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index c5b7657..3a6cbcd 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -114,12 +114,13 @@ function Layer:find_param(plist, lconf, gconf, p_type, p_dim, p_gen)
if lconf.pr:has_param(pid) then
return lconf.pr:get_param(pid)
end
+ pid = self.id .. '_' .. pname
+ if lconf.pr:has_param(pid) then
+ nerv.info("param id for [%s] of layer [%s] is generated automatically.", plist[1], self.id)
+ return lconf.pr:get_param(pid)
+ end
end
pid = self.id .. '_' .. plist[1]
- if lconf.pr:has_param(pid) then
- nerv.info("param id for [%s] of layer [%s] is generated automatically.", plist[1], self.id)
- return lconf.pr:get_param(pid)
- end
nerv.info("param id for [%s] of layer [%s] is not found in the specified param repo, " ..
"switch to auto-generate", plist_str, self.id)
local p = p_type(pid, gconf)
diff --git a/nerv/layer/lstm_gate.lua b/nerv/layer/lstm_gate.lua
index e3b11b4..a3ae797 100644
--- a/nerv/layer/lstm_gate.lua
+++ b/nerv/layer/lstm_gate.lua
@@ -37,12 +37,12 @@ function LSTMGateLayer:init(batch_size)
nerv.error("mismatching dimensions of linear transform parameter and output")
end
self.bp:train_init()
- self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+ self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
end
function LSTMGateLayer:batch_resize(batch_size)
if self.err_m:nrow() ~= batch_size then
- self.err_bakm = self.gconf.cumat_type(batch_size, self.dim_out[1])
+ self.err_bakm = self.mat_type(batch_size, self.dim_out[1])
end
end
diff --git a/nerv/lib/matrix/generic/mmatrix.c b/nerv/lib/matrix/generic/mmatrix.c
index e356de7..ccfb2ce 100644
--- a/nerv/lib/matrix/generic/mmatrix.c
+++ b/nerv/lib/matrix/generic/mmatrix.c
@@ -412,6 +412,54 @@ void nerv_matrix_(log_elem)(Matrix *b, const Matrix *a,
NERV_SET_STATUS(status, NERV_NORMAL, 0);
}
+void nerv_matrix_(tanh)(Matrix *b, const Matrix *a,
+ MContext *context, Status *status) {
+ CHECK_SAME_DIMENSION(a, b, status);
+ int i, j;
+ size_t astride = a->stride, bstride = b->stride;
+ const MATRIX_ELEM *arow = MATRIX_ELEM_PTR(a);
+ MATRIX_ELEM *brow = MATRIX_ELEM_PTR(b);
+ MATRIX_ELEM limit =
+#ifdef MATRIX_USE_FLOAT
+ FLT_MIN;
+#elif defined (MATRIX_USE_DOUBLE)
+ DBL_MIN;
+#elif defined (MATRIX_USE_INT)
+ 1;
+#endif
+ for (i = 0; i < b->nrow; i++)
+ {
+ for (j = 0; j < b->ncol; j++)
+ brow[j] = tanh(arow[j]);
+ arow = MATRIX_NEXT_ROW_PTR(arow, astride);
+ brow = MATRIX_NEXT_ROW_PTR(brow, bstride);
+ }
+ NERV_SET_STATUS(status, NERV_NORMAL, 0);
+}
+
+void nerv_matrix_(tanh_grad)(Matrix *nerr, const Matrix *err,
+ const Matrix *output,
+ MContext *context, Status *status) {
+ CHECK_SAME_DIMENSION(nerr, err, status);
+ CHECK_SAME_DIMENSION(nerr, output, status);
+ int i, j;
+ size_t nerr_stride = nerr->stride,
+ err_stride = err->stride,
+ out_stride = output->stride;
+ MATRIX_ELEM *nerr_row = MATRIX_ELEM_PTR(nerr);
+ const MATRIX_ELEM *err_row = MATRIX_ELEM_PTR(err),
+ *out_row = MATRIX_ELEM_PTR(output);
+ for (i = 0; i < nerr->nrow; i++)
+ {
+ for (j = 0; j < nerr->ncol; j++)
+ nerr_row[j] = (1.0 - out_row[j] * out_row[j]) * err_row[j];
+ nerr_row = MATRIX_NEXT_ROW_PTR(nerr_row, nerr_stride);
+ err_row = MATRIX_NEXT_ROW_PTR(err_row, err_stride);
+ out_row = MATRIX_NEXT_ROW_PTR(out_row, out_stride);
+ }
+ NERV_SET_STATUS(status, NERV_NORMAL, 0);
+}
+
void nerv_matrix_(expand_frm)(Matrix *a, const Matrix *b,
int cont, MContext *context, Status *status) {
if (a->nrow != b->nrow)
diff --git a/nerv/matrix/generic/cumatrix.c b/nerv/matrix/generic/cumatrix.c
index 9577fd5..540afd2 100644
--- a/nerv/matrix/generic/cumatrix.c
+++ b/nerv/matrix/generic/cumatrix.c
@@ -51,29 +51,6 @@ static int nerv_matrix_(lua_rand_uniform)(lua_State *L) {
return 0;
}
-static int nerv_matrix_(lua_tanh)(lua_State *L) {
- Status status;
- MATRIX_CONTEXT *context;
- MATRIX_GET_CONTEXT(L, 3);
- Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
- nerv_matrix_(tanh)(a, b, context, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
-static int nerv_matrix_(lua_tanh_grad)(lua_State *L) {
- Status status;
- MATRIX_CONTEXT *context;
- MATRIX_GET_CONTEXT(L, 4);
- Matrix *nerr = luaT_checkudata(L, 1, nerv_matrix_(tname));
- Matrix *err = luaT_checkudata(L, 2, nerv_matrix_(tname));
- Matrix *output = luaT_checkudata(L, 3, nerv_matrix_(tname));
- nerv_matrix_(tanh_grad)(nerr, err, output, context, &status);
- NERV_LUA_CHECK_STATUS(L, status);
- return 0;
-}
-
extern const char *MATRIX_CUMATRIX_HOST_TNAME;
static int nerv_matrix_(lua_copy_fromh)(lua_State *L) {
Status status;
diff --git a/nerv/matrix/generic/matrix.c b/nerv/matrix/generic/matrix.c
index 3e91933..800408d 100644
--- a/nerv/matrix/generic/matrix.c
+++ b/nerv/matrix/generic/matrix.c
@@ -407,4 +407,27 @@ static int nerv_matrix_(lua_set_values_by_mask)(lua_State *L) {
return 0;
}
+static int nerv_matrix_(lua_tanh)(lua_State *L) {
+ Status status;
+ MATRIX_CONTEXT *context;
+ MATRIX_GET_CONTEXT(L, 3);
+ Matrix *a = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *b = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ nerv_matrix_(tanh)(a, b, context, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
+static int nerv_matrix_(lua_tanh_grad)(lua_State *L) {
+ Status status;
+ MATRIX_CONTEXT *context;
+ MATRIX_GET_CONTEXT(L, 4);
+ Matrix *nerr = luaT_checkudata(L, 1, nerv_matrix_(tname));
+ Matrix *err = luaT_checkudata(L, 2, nerv_matrix_(tname));
+ Matrix *output = luaT_checkudata(L, 3, nerv_matrix_(tname));
+ nerv_matrix_(tanh_grad)(nerr, err, output, context, &status);
+ NERV_LUA_CHECK_STATUS(L, status);
+ return 0;
+}
+
#endif
diff --git a/nerv/matrix/generic/mmatrix.c b/nerv/matrix/generic/mmatrix.c
index de1eaa3..c03aee4 100644
--- a/nerv/matrix/generic/mmatrix.c
+++ b/nerv/matrix/generic/mmatrix.c
@@ -120,6 +120,8 @@ static const luaL_Reg nerv_matrix_(extra_methods)[] = {
{"set_values_by_mask", nerv_matrix_(lua_set_values_by_mask)},
{"sigmoid", nerv_matrix_(lua_sigmoid)},
{"sigmoid_grad", nerv_matrix_(lua_sigmoid_grad)},
+ {"tanh", nerv_matrix_(lua_tanh)},
+ {"tanh_grad", nerv_matrix_(lua_tanh_grad)},
{"softmax", nerv_matrix_(lua_softmax)},
{"mul_elem", nerv_matrix_(lua_mul_elem)},
{"log_elem", nerv_matrix_(lua_log_elem)},
diff --git a/nerv/test/cumatrix_func.out b/nerv/test/cumatrix_func.out
index 2761126..44e9015 100644
--- a/nerv/test/cumatrix_func.out
+++ b/nerv/test/cumatrix_func.out
@@ -1,4 +1,3 @@
-Greetings
0.00000000 1.00000000 2.00000000 3.00000000
1.00000000 2.00000000 3.00000000 4.00000000
2.00000000 3.00000000 4.00000000 5.00000000
@@ -176,6 +175,10 @@ Greetings
1.00000000 2.00000000 3.00000000 4.00000000
4.00000000 6.00000000 8.00000000 10.00000000
[nerv.CuMatrixFloat 3 x 4]
+0.00000000 0.76159418 0.96402758 0.99505478
+0.76159418 0.96402758 0.99505478 0.99932933
+0.96402758 0.99505478 0.99932933 0.99990922
+[nerv.CuMatrixFloat 3 x 4]
0.00000000 1.00000000 2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000 11.00000000 12.00000000 13.00000000 14.00000000 15.00000000 16.00000000 17.00000000 18.00000000 19.00000000 20.00000000 21.00000000 22.00000000 23.00000000 24.00000000 25.00000000 26.00000000 27.00000000 28.00000000 29.00000000 30.00000000 31.00000000 32.00000000 33.00000000 34.00000000 35.00000000 36.00000000 37.00000000 38.00000000 39.00000000
1.00000000 2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000 11.00000000 12.00000000 13.00000000 14.00000000 15.00000000 16.00000000 17.00000000 18.00000000 19.00000000 20.00000000 21.00000000 22.00000000 23.00000000 24.00000000 25.00000000 26.00000000 27.00000000 28.00000000 29.00000000 30.00000000 31.00000000 32.00000000 33.00000000 34.00000000 35.00000000 36.00000000 37.00000000 38.00000000 39.00000000 40.00000000
2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000 11.00000000 12.00000000 13.00000000 14.00000000 15.00000000 16.00000000 17.00000000 18.00000000 19.00000000 20.00000000 21.00000000 22.00000000 23.00000000 24.00000000 25.00000000 26.00000000 27.00000000 28.00000000 29.00000000 30.00000000 31.00000000 32.00000000 33.00000000 34.00000000 35.00000000 36.00000000 37.00000000 38.00000000 39.00000000 40.00000000 41.00000000
@@ -1523,6 +1526,10 @@ Greetings
784.00000000 812.00000000 840.00000000 868.00000000 896.00000000 924.00000000 952.00000000 980.00000000 1008.00000000 1036.00000000 1064.00000000 1092.00000000 1120.00000000 1148.00000000 1176.00000000 1204.00000000 1232.00000000 1260.00000000 1288.00000000 1316.00000000 1344.00000000 1372.00000000 1400.00000000 1428.00000000 1456.00000000 1484.00000000 1512.00000000 1540.00000000 1568.00000000 1596.00000000 1624.00000000 1652.00000000 1680.00000000 1708.00000000 1736.00000000 1764.00000000 1792.00000000 1820.00000000 1848.00000000 1876.00000000
841.00000000 870.00000000 899.00000000 928.00000000 957.00000000 986.00000000 1015.00000000 1044.00000000 1073.00000000 1102.00000000 1131.00000000 1160.00000000 1189.00000000 1218.00000000 1247.00000000 1276.00000000 1305.00000000 1334.00000000 1363.00000000 1392.00000000 1421.00000000 1450.00000000 1479.00000000 1508.00000000 1537.00000000 1566.00000000 1595.00000000 1624.00000000 1653.00000000 1682.00000000 1711.00000000 1740.00000000 1769.00000000 1798.00000000 1827.00000000 1856.00000000 1885.00000000 1914.00000000 1943.00000000 1972.00000000
[nerv.CuMatrixFloat 30 x 40]
+0.00000000 0.76159418 0.96402758 0.99505478
+0.76159418 0.96402758 0.99505478 0.99932933
+0.96402758 0.99505478 0.99932933 0.99990922
+[nerv.CuMatrixFloat 3 x 4]
0.00000000 1.00000000 2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000
1.00000000 2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000
2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000 11.00000000
@@ -1950,3 +1957,7 @@ Greetings
64.00000000 72.00000000 80.00000000 88.00000000 96.00000000 104.00000000 112.00000000 120.00000000 128.00000000 136.00000000
81.00000000 90.00000000 99.00000000 108.00000000 117.00000000 126.00000000 135.00000000 144.00000000 153.00000000 162.00000000
[nerv.CuMatrixFloat 10 x 10]
+0.00000000 0.76159418 0.96402758 0.99505478
+0.76159418 0.96402758 0.99505478 0.99932933
+0.96402758 0.99505478 0.99932933 0.99990922
+[nerv.CuMatrixFloat 3 x 4]
diff --git a/nerv/test/matrix_func.lua b/nerv/test/matrix_func.lua
index 07ddf9c..817d463 100644
--- a/nerv/test/matrix_func.lua
+++ b/nerv/test/matrix_func.lua
@@ -160,6 +160,10 @@ function _test_all_shape(mat_type, m, n, k, fill)
print(a)
a:scale_rows_by_col(b)
print(a)
+ a = fill(mat_type, 3, 4)
+ local c = a:create()
+ c:tanh(a)
+ print(c)
end
function test_all(mat_type)
_test_all_shape(mat_type, 3, 4, 2, _pattern_fill)
diff --git a/nerv/test/mmatrix_func.out b/nerv/test/mmatrix_func.out
index 9f95989..721ee21 100644
--- a/nerv/test/mmatrix_func.out
+++ b/nerv/test/mmatrix_func.out
@@ -1,4 +1,3 @@
-Greetings
0.00000000 1.00000000 2.00000000 3.00000000
1.00000000 2.00000000 3.00000000 4.00000000
2.00000000 3.00000000 4.00000000 5.00000000
@@ -176,6 +175,10 @@ Greetings
1.00000000 2.00000000 3.00000000 4.00000000
4.00000000 6.00000000 8.00000000 10.00000000
[nerv.MMatrixFloat 3 x 4]
+0.00000000 0.76159418 0.96402758 0.99505478
+0.76159418 0.96402758 0.99505478 0.99932933
+0.96402758 0.99505478 0.99932933 0.99990922
+[nerv.MMatrixFloat 3 x 4]
0.00000000 1.00000000 2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000 11.00000000 12.00000000 13.00000000 14.00000000 15.00000000 16.00000000 17.00000000 18.00000000 19.00000000 20.00000000 21.00000000 22.00000000 23.00000000 24.00000000 25.00000000 26.00000000 27.00000000 28.00000000 29.00000000 30.00000000 31.00000000 32.00000000 33.00000000 34.00000000 35.00000000 36.00000000 37.00000000 38.00000000 39.00000000
1.00000000 2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000 11.00000000 12.00000000 13.00000000 14.00000000 15.00000000 16.00000000 17.00000000 18.00000000 19.00000000 20.00000000 21.00000000 22.00000000 23.00000000 24.00000000 25.00000000 26.00000000 27.00000000 28.00000000 29.00000000 30.00000000 31.00000000 32.00000000 33.00000000 34.00000000 35.00000000 36.00000000 37.00000000 38.00000000 39.00000000 40.00000000
2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000 11.00000000 12.00000000 13.00000000 14.00000000 15.00000000 16.00000000 17.00000000 18.00000000 19.00000000 20.00000000 21.00000000 22.00000000 23.00000000 24.00000000 25.00000000 26.00000000 27.00000000 28.00000000 29.00000000 30.00000000 31.00000000 32.00000000 33.00000000 34.00000000 35.00000000 36.00000000 37.00000000 38.00000000 39.00000000 40.00000000 41.00000000
@@ -1523,6 +1526,10 @@ Greetings
784.00000000 812.00000000 840.00000000 868.00000000 896.00000000 924.00000000 952.00000000 980.00000000 1008.00000000 1036.00000000 1064.00000000 1092.00000000 1120.00000000 1148.00000000 1176.00000000 1204.00000000 1232.00000000 1260.00000000 1288.00000000 1316.00000000 1344.00000000 1372.00000000 1400.00000000 1428.00000000 1456.00000000 1484.00000000 1512.00000000 1540.00000000 1568.00000000 1596.00000000 1624.00000000 1652.00000000 1680.00000000 1708.00000000 1736.00000000 1764.00000000 1792.00000000 1820.00000000 1848.00000000 1876.00000000
841.00000000 870.00000000 899.00000000 928.00000000 957.00000000 986.00000000 1015.00000000 1044.00000000 1073.00000000 1102.00000000 1131.00000000 1160.00000000 1189.00000000 1218.00000000 1247.00000000 1276.00000000 1305.00000000 1334.00000000 1363.00000000 1392.00000000 1421.00000000 1450.00000000 1479.00000000 1508.00000000 1537.00000000 1566.00000000 1595.00000000 1624.00000000 1653.00000000 1682.00000000 1711.00000000 1740.00000000 1769.00000000 1798.00000000 1827.00000000 1856.00000000 1885.00000000 1914.00000000 1943.00000000 1972.00000000
[nerv.MMatrixFloat 30 x 40]
+0.00000000 0.76159418 0.96402758 0.99505478
+0.76159418 0.96402758 0.99505478 0.99932933
+0.96402758 0.99505478 0.99932933 0.99990922
+[nerv.MMatrixFloat 3 x 4]
0.00000000 1.00000000 2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000
1.00000000 2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000
2.00000000 3.00000000 4.00000000 5.00000000 6.00000000 7.00000000 8.00000000 9.00000000 10.00000000 11.00000000
@@ -1950,3 +1957,7 @@ Greetings
64.00000000 72.00000000 80.00000000 88.00000000 96.00000000 104.00000000 112.00000000 120.00000000 128.00000000 136.00000000
81.00000000 90.00000000 99.00000000 108.00000000 117.00000000 126.00000000 135.00000000 144.00000000 153.00000000 162.00000000
[nerv.MMatrixFloat 10 x 10]
+0.00000000 0.76159418 0.96402758 0.99505478
+0.76159418 0.96402758 0.99505478 0.99932933
+0.96402758 0.99505478 0.99932933 0.99990922
+[nerv.MMatrixFloat 3 x 4]