Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/duplicate.lua   | 5 +----
-rw-r--r--  nerv/layer/rnn.lua         | 2 +-
-rw-r--r--  nerv/layer/softmax_ce.lua  | 4 +++-
3 files changed, 5 insertions, 6 deletions
diff --git a/nerv/layer/duplicate.lua b/nerv/layer/duplicate.lua
index 137472b..2621cdf 100644
--- a/nerv/layer/duplicate.lua
+++ b/nerv/layer/duplicate.lua
@@ -20,10 +20,7 @@ function DuplicateLayer:batch_resize()
 end
 
 function DuplicateLayer:propagate(input, output)
-    for i = 1, #self.dim_out do
-        output[i]:copy_from(input[1])
-        -- FIXME: use reference copy to speed up
-    end
+    -- do nothing, use reference copy in nn/network.lua
 end
 
 function DuplicateLayer:back_propagate(bp_err, next_bp_err)
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
index 0b5ccaa..333be9e 100644
--- a/nerv/layer/rnn.lua
+++ b/nerv/layer/rnn.lua
@@ -20,7 +20,7 @@ function RNNLayer:__init(id, global_conf, layer_conf)
         ['nerv.AffineLayer'] = {
             main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
         },
-        [layers.activation] = {
+        [layer_conf.activation] = {
             activation = {dim_in = {dout}, dim_out = {dout}},
         },
         ['nerv.DuplicateLayer'] = {
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
index 7b4a80c..acd4ee6 100644
--- a/nerv/layer/softmax_ce.lua
+++ b/nerv/layer/softmax_ce.lua
@@ -61,14 +61,16 @@ function SoftmaxCELayer:propagate(input, output, t)
     end
     ce:mul_elem(ce, label)
     ce = ce:rowsum()
+    ce:set_values_by_mask(self.gconf.mask[t], 0)
     if output[1] ~= nil then
         output[1]:copy_from(ce)
     end
     -- add total ce
     self.total_ce = self.total_ce - ce:colsum()[0][0]
-    self.total_frames = self.total_frames + softmax:nrow()
+    self.total_frames = self.total_frames + self.gconf.mask[t]:colsum()[0][0]
     -- TODO: add colsame for uncompressed label
     if self.compressed then
+        classified:set_values_by_mask(self.gconf.mask[t], -1)
         self.total_correct = self.total_correct + classified:colsame(input[2])[0][0]
     end
 end
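
Note on the softmax_ce.lua change: the per-frame cross-entropy is zeroed for padded frames via gconf.mask[t], the valid-frame count is taken from the mask's column sum instead of softmax:nrow(), and masked frames are set to class -1 so they never count as correct. Below is a minimal plain-Lua sketch of the assumed mask semantics; apply_mask is a hypothetical helper for illustration, not the NERV matrix API.

-- Conceptual sketch of the assumed per-frame mask semantics (plain Lua,
-- not the NERV CuMatrix API). mask[i] is 1 for a real frame, 0 for padding.
local function apply_mask(values, mask, fill)
    for i = 1, #values do
        if mask[i] == 0 then
            values[i] = fill   -- overwrite entries belonging to padded frames
        end
    end
    return values
end

-- CE contributions of padded frames become 0, so they drop out of total_ce;
-- predicted classes become -1, so they can never match a reference label.
local ce   = apply_mask({0.3, 0.1, 0.7, 0.5}, {1, 1, 1, 0},  0)  -- {0.3, 0.1, 0.7, 0}
local pred = apply_mask({2, 5, 5, 3},         {1, 1, 1, 0}, -1)  -- {2, 5, 5, -1}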