author     Determinant <[email protected]>    2015-05-28 14:31:31 +0800
committer  Determinant <[email protected]>    2015-05-28 14:31:31 +0800
commit     c13115662e739b434f1071eb623a41a39d8b4985
tree       9368b5706ef9ddb3002369d3193f525ad9814201
parent     382106f36025f76e2f5d04c44b9ccb0998cf40cf
should support multiple input/output for layers
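
Each layer method now receives its inputs, outputs, and error signals as Lua
tables of matrices (one entry per port, indexed from 0) rather than bare
matrices. A minimal sketch of the new calling convention, not part of the
original commit: it assumes require 'nerv' loads the library, uses arbitrary
dimensions, and omits layer construction, since those parameters are not part
of this diff.

    require 'nerv'

    local batch_size, dim = 256, 429   -- hypothetical sizes
    -- one-element tables replace the bare matrices used before this commit
    local input  = {[0] = nerv.CuMatrixFloat(batch_size, dim)}
    local output = {[0] = nerv.CuMatrixFloat(batch_size, dim)}
    -- with layer being any layer instance (e.g. a SigmoidLayer):
    -- layer:propagate(input, output)   -- reads input[0], writes output[0]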
 layer/affine.lua  | 12 ++++++------
 layer/sigmoid.lua |  4 ++--
 matrix/init.lua   |  4 ++--
 3 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/layer/affine.lua b/layer/affine.lua
index 94e7497..97703a8 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -34,10 +34,10 @@ function nerv.AffineLayer:update(bp_err, input, output)
     local gconf = self.gconf
     -- momentum gain
     local mmt_gain = 1.0 / (1.0 - gconf.momentum);
-    local n = input:nrow() * mmt_gain
+    local n = input[0]:nrow() * mmt_gain
     -- update corrections (accumulated errors)
-    ltc:mul(input, bp_err, 1.0, gconf.momentum, 'T', 'N')
-    bc:add(bc, bp_err:colsum(), gconf.momentum, 1.0)
+    ltc:mul(input[0], bp_err[0], 1.0, gconf.momentum, 'T', 'N')
+    bc:add(bc, bp_err[0]:colsum(), gconf.momentum, 1.0)
     -- perform update
     ltp:add(ltp, ltc, 1.0, -gconf.lrate / n)
     bp:add(bp, bc, 1.0, -gconf.lrate / n)
@@ -47,11 +47,11 @@ end
 
 function nerv.AffineLayer:propagate(input, output)
     -- apply linear transform
-    output:mul(input, self.ltp.trans, 1.0, 0.0, 'N', 'N')
+    output[0]:mul(input[0], self.ltp.trans, 1.0, 0.0, 'N', 'N')
     -- add bias
-    output:add_row(self.bp.trans, 1.0)
+    output[0]:add_row(self.bp.trans, 1.0)
 end
 
 function nerv.AffineLayer:back_propagate(next_bp_err, bp_err, input, output)
-    next_bp_err:mul(bp_err, self.ltp.trans, 1.0, 0.0, 'N', 'T')
+    next_bp_err[0]:mul(bp_err[0], self.ltp.trans, 1.0, 0.0, 'N', 'T')
 end
diff --git a/layer/sigmoid.lua b/layer/sigmoid.lua
index d0a87c0..41a6ef7 100644
--- a/layer/sigmoid.lua
+++ b/layer/sigmoid.lua
@@ -10,9 +10,9 @@ function SigmoidLayer:update(bp_err, input, output)
 end
 
 function SigmoidLayer:propagate(input, output)
-    output:sigmoid(input)
+    output[0]:sigmoid(input[0])
 end
 
 function SigmoidLayer:back_propagate(next_bp_err, bp_err, input, output)
-    next_bp_err:sigmoid_grad(bp_err, output)
+    next_bp_err[0]:sigmoid_grad(bp_err[0], output[0])
 end
diff --git a/matrix/init.lua b/matrix/init.lua
index a04e83a..0075668 100644
--- a/matrix/init.lua
+++ b/matrix/init.lua
@@ -41,12 +41,12 @@ end
 
 function nerv.CuMatrixFloat.new_from_host(mat)
     local res = nerv.CuMatrixFloat(mat:nrow(), mat:ncol())
-    res:copy_from(mat)
+    res:copy_fromh(mat)
     return res
 end
 
 function nerv.CuMatrixFloat:new_to_host()
     local res = nerv.MMatrixFloat(self:nrow(), self:ncol())
-    self:copy_to(res)
+    self:copy_toh(res)
     return res
 end
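
For the matrix/init.lua change, a round-trip sketch of the renamed host/device
copy methods, again assuming require 'nerv' and using only the constructors
that appear in the hunk above (the h suffix presumably marks copies that cross
the host/device boundary):

    require 'nerv'

    local host = nerv.MMatrixFloat(2, 3)                 -- host (CPU) matrix
    local dev  = nerv.CuMatrixFloat.new_from_host(host)  -- calls copy_fromh
    local back = dev:new_to_host()                       -- calls copy_toh
    -- back is a new nerv.MMatrixFloat with the same contents as host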