aboutsummaryrefslogtreecommitdiff
path: root/nerv/layer/combiner.lua
diff options
context:
space:
mode:
author	Ted Yin <ted.sybil@gmail.com>	2015-10-12 09:25:32 +0800
committer	Ted Yin <ted.sybil@gmail.com>	2015-10-12 09:25:32 +0800
commit64fce92b7845b716f3c168036691c37b2467d99b (patch)
tree7c3da4cd2647d97cb9d1edb3386f59f64eb8030d /nerv/layer/combiner.lua
parentbd6d0d8b72ec656dd8fa0c13aa602f4f6e022391 (diff)
parent473eb9c082224be19f147697ba951ae5bac4b4b4 (diff)
Merge pull request #6 from yimmon/master
support batch_resize (sequence training related layers are in kaldi_seq)
Diffstat (limited to 'nerv/layer/combiner.lua')
-rw-r--r--nerv/layer/combiner.lua6
1 file changed, 6 insertions, 0 deletions
diff --git a/nerv/layer/combiner.lua b/nerv/layer/combiner.lua
index 7bd7617..1bcfdfb 100644
--- a/nerv/layer/combiner.lua
+++ b/nerv/layer/combiner.lua
@@ -30,6 +30,12 @@ function CombinerLayer:init(batch_size)
self.sum = self.gconf.cumat_type(batch_size, dim)
end
+function CombinerLayer:batch_resize(batch_size)
+ if self.sum:nrow() ~= batch_size then
+ self.sum = self.gconf.cumat_type(batch_size, self.dim_in[1])
+ end
+end
+
function CombinerLayer:update(bp_err, input, output)
end