diff options
Diffstat (limited to 'nerv/layer')
-rw-r--r-- | nerv/layer/duplicate.lua  | 5 | +----
-rw-r--r-- | nerv/layer/softmax_ce.lua | 4 | +++-
2 files changed, 4 insertions, 5 deletions
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua
index 137472b..2621cdf 100644
--- a/nerv/layer/duplicate.lua
+++ b/nerv/layer/duplicate.lua
@@ -20,10 +20,7 @@ function DuplicateLayer:batch_resize()
 end
 
 function DuplicateLayer:propagate(input, output)
-    for i = 1, #self.dim_out do
-        output[i]:copy_from(input[1])
-        -- FIXME: use reference copy to speed up
-    end
+    -- do nothing, use reference copy in nn/network.lua
 end
 
 function DuplicateLayer:back_propagate(bp_err, next_bp_err)
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
index 7b4a80c..acd4ee6 100644
--- a/nerv/layer/softmax_ce.lua
+++ b/nerv/layer/softmax_ce.lua
@@ -61,14 +61,16 @@ function SoftmaxCELayer:propagate(input, output, t)
     end
     ce:mul_elem(ce, label)
     ce = ce:rowsum()
+    ce:set_values_by_mask(self.gconf.mask[t], 0)
     if output[1] ~= nil then
         output[1]:copy_from(ce)
     end
     -- add total ce
     self.total_ce = self.total_ce - ce:colsum()[0][0]
-    self.total_frames = self.total_frames + softmax:nrow()
+    self.total_frames = self.total_frames + self.gconf.mask[t]:colsum()[0][0]
     -- TODO: add colsame for uncompressed label
     if self.compressed then
+        classified:set_values_by_mask(self.gconf.mask[t], -1)
         self.total_correct = self.total_correct + classified:colsame(input[2])[0][0]
     end
 end