author    | Determinant <[email protected]> | 2015-06-26 01:03:54 +0800
committer | Determinant <[email protected]> | 2015-06-26 01:03:54 +0800
commit    | bc17acd4c5f98df4e00b7c85e479cbff2d5da5a7 (patch)
tree      | b05de64bc93d13c23ff11aa7f6650ad9f4bd8dc1
parent    | aae4195c3898c0da0be5aae0b80e633185e1e242 (diff)
...
-rw-r--r-- | nerv/layer/softmax_ce.lua |  2 |
-rw-r--r-- | nerv/lib/matrix/init.lua  | 78 |
2 files changed, 1 insertion, 79 deletions
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
index c78d462..f878a2f 100644
--- a/nerv/layer/softmax_ce.lua
+++ b/nerv/layer/softmax_ce.lua
@@ -46,7 +46,7 @@ function SoftmaxCELayer:propagate(input, output)
     self.total_frames = self.total_frames + softmax:nrow()
     -- TODO: add colsame for uncompressed label
     if self.compressed then
-        self.total_correct = self.total_correct + classified:colsame(input[2])[0]
+        self.total_correct = self.total_correct + classified:colsame(input[2])[0][0]
     end
 end
diff --git a/nerv/lib/matrix/init.lua b/nerv/lib/matrix/init.lua
deleted file mode 100644
index 89f89d6..0000000
--- a/nerv/lib/matrix/init.lua
+++ /dev/null
@@ -1,78 +0,0 @@
-function nerv.Matrix:__tostring__()
-    local ncol = self:ncol()
-    local nrow = self:nrow()
-    local dim = self:dim()
-    local strt = {}
-    local fmt
-    if self.fmt then
-        fmt = self.fmt
-    else
-        fmt = "%.8f "
-    end
-    if dim == 1 then
-        for col = 0, ncol - 1 do
-            table.insert(strt, string.format(fmt, self[col]))
-        end
-        table.insert(strt, "\n")
-    else
-        for row = 0, nrow - 1 do
-            local rp = self[row]
-            for col = 0, ncol - 1 do
-                table.insert(strt, string.format(fmt, rp[col]))
-            end
-            table.insert(strt, "\n")
-        end
-    end
-    table.insert(strt, string.format(
-        "[%s %d x %d]", self.__typename, nrow, ncol))
-    return table.concat(strt)
-end
-
--- gen: a function takes take indices of the matrix and return the generated
--- all entrys in the matrix will be assigned by calling gen(i, j), if self is a row vector, gen(j) will be called
-function nerv.Matrix:generate(gen)
-    if (self:dim() == 1) then
-        for j = 0, self:ncol() - 1 do
-            self[j] = gen(j)
-        end
-    else
-        for i = 0, self:nrow() - 1 do
-            local row = self[i]
-            for j = 0, self:ncol() - 1 do
-                row[j] = gen(i, j)
-            end
-        end
-    end
-end
-
-nerv.MMatrixInt.fmt = "%d "
-
-function nerv.CuMatrix:__add__(b)
-    c = self:create()
-    c:add(self, b, 1.0, 1.0)
-    return c
-end
-
-function nerv.CuMatrix:__sub__(b)
-    c = self:create()
-    c:add(self, b, 1.0, -1.0)
-    return c
-end
-
-function nerv.CuMatrix:__mul__(b)
-    c = nerv.get_type(self.__typename)(self:nrow(), b:ncol())
-    c:mul(self, b, 1.0, 0.0, 'N', 'N')
-    return c
-end
-
-function nerv.CuMatrixFloat.new_from_host(mat)
-    local res = nerv.CuMatrixFloat(mat:nrow(), mat:ncol())
-    res:copy_fromh(mat)
-    return res
-end
-
-function nerv.CuMatrixFloat:new_to_host()
-    local res = nerv.MMatrixFloat(self:nrow(), self:ncol())
-    self:copy_toh(res)
-    return res
-end
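The one-line change in softmax_ce.lua suggests that `colsame` returns a nerv matrix rather than a plain Lua number, so the scalar count has to be read with row-then-column indexing, the same `self[row][col]` convention used by the removed `__tostring__`. A minimal sketch of the corrected access, assuming `classified` and `input[2]` are the label matrices the layer already holds and that `colsame` yields a 1x1 result (an assumption, not confirmed by this diff):

```lua
-- Sketch only: assumes classified:colsame(labels) returns a 1x1 nerv matrix
-- holding the number of matching columns, not a plain Lua number.
local same = classified:colsame(input[2])   -- assumed 1x1 matrix
-- same[0] is the first (only) row; same[0][0] is the scalar entry.
self.total_correct = self.total_correct + same[0][0]
```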