author     Qi Liu <liuq901@163.com>    2016-06-21 15:49:07 +0800
committer  Qi Liu <liuq901@163.com>    2016-06-21 15:49:07 +0800
commit     3622d8315aad9f8438b1cfcb734165de459725a9 (patch)
tree       ed20795f95854c72137bd7537a4097582e997fb3 /nerv/layer/softmax.lua
parent     bc49910f6f55620a4fb4e7038e751bab52fdafa6 (diff)
parent     3856e63dab1b28aaec4133b6b0ec2a44ebf8cf46 (diff)
Master
add back propagation function for softmax
See merge request !6
Diffstat (limited to 'nerv/layer/softmax.lua')
-rw-r--r--    nerv/layer/softmax.lua | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/nerv/layer/softmax.lua b/nerv/layer/softmax.lua
index f7a5163..6789ccc 100644
--- a/nerv/layer/softmax.lua
+++ b/nerv/layer/softmax.lua
@@ -28,7 +28,12 @@ function SoftmaxLayer:propagate(input, output)
 end
 
 function SoftmaxLayer:back_propagate(bp_err, next_bp_err, input, output)
-    nerv.error_method_not_implemented()
+    local nbe = next_bp_err[1]
+    nbe:mul_elem(bp_err[1], output[1])
+    local offset = nbe:rowsum()
+    nbe:copy_from(bp_err[1])
+    nbe:add_col(offset, -1.0)
+    nbe:mul_elem(nbe, output[1])
 end
 
 function SoftmaxLayer:get_params()
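
For context, the new back_propagate body replaces the nerv.error_method_not_implemented() stub with the softmax Jacobian-vector product. Reading the nerv matrix calls as elementwise multiply (mul_elem), per-row sum (rowsum), and subtraction of a per-row column vector (add_col with weight -1.0), the hunk computes, per row, next_bp_err = output .* (bp_err - rowsum(bp_err .* output)), i.e. dE/dx_i = y_i * (delta_i - sum_j delta_j * y_j) for softmax output y and incoming error delta. Below is a minimal plain-Lua sketch of the same arithmetic for a single row vector; the helper name softmax_backward_row is hypothetical and not part of nerv:

    -- Backward pass of softmax for one row: grad_i = y_i * (delta_i - offset),
    -- where offset = sum_j delta_j * y_j (mirrors mul_elem + rowsum in the diff).
    local function softmax_backward_row(delta, y)
        local offset = 0
        for j = 1, #y do
            offset = offset + delta[j] * y[j]
        end
        local grad = {}
        for i = 1, #y do
            grad[i] = y[i] * (delta[i] - offset)
        end
        return grad
    end

    -- Example: y = softmax(x) for some x, delta = upstream error signal.
    -- Yields {0.16, -0.06, -0.10}; the entries sum to zero, as a softmax
    -- input gradient must, since the softmax output always sums to 1.
    local grad = softmax_backward_row({1.0, 0.0, 0.0}, {0.2, 0.3, 0.5})
    for i, g in ipairs(grad) do print(i, g) end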