Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/affine.lua            | 4 ++++
-rw-r--r--  nerv/layer/affine_recurrent.lua  | 4 ++++
-rw-r--r--  nerv/layer/bias.lua              | 4 ++++
-rw-r--r--  nerv/layer/combiner.lua          | 6 ++++++
-rw-r--r--  nerv/layer/mse.lua               | 8 ++++++++
-rw-r--r--  nerv/layer/sigmoid.lua           | 4 ++++
-rw-r--r--  nerv/layer/softmax.lua           | 4 ++++
-rw-r--r--  nerv/layer/softmax_ce.lua        | 7 +++++++
-rw-r--r--  nerv/layer/window.lua            | 4 ++++
9 files changed, 45 insertions(+), 0 deletions(-)
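The change below adds a batch_resize(batch_size) hook to every layer: stateless layers (affine, affine_recurrent, bias, sigmoid, softmax, window) implement it as a no-op, while layers that hold batch-sized buffers (combiner, mse, softmax_ce) reallocate those buffers when the row count changes. A minimal caller-side sketch, assuming a hypothetical resize_all helper and a net.layers table (neither is part of this commit or of the nerv API):

-- hypothetical driver-side sketch: propagate a new batch size to every layer
-- net.layers and resize_all are illustrative assumptions, not nerv API
local function resize_all(net, batch_size)
    for _, layer in ipairs(net.layers) do
        layer:batch_resize(batch_size)
    end
end

-- e.g. when the last mini-batch of an epoch is smaller than gconf.batch_size:
-- resize_all(net, last_batch_size)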
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index b81b2a4..015ec3f 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -60,6 +60,10 @@ function AffineLayer:init(batch_size)
self.bp:train_init()
end
+function AffineLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function AffineLayer:update(bp_err, input, output)
if self.direct_update then
self.ltp.correction:mul(input[1], bp_err[1], 1.0, gconf.momentum, 'T', 'N')
diff --git a/nerv/layer/affine_recurrent.lua b/nerv/layer/affine_recurrent.lua
index 59d259c..92d98e2 100644
--- a/nerv/layer/affine_recurrent.lua
+++ b/nerv/layer/affine_recurrent.lua
@@ -37,6 +37,10 @@ function Recurrent:init(batch_size)
self.bp:train_init()
end
+function Recurrent:batch_resize(batch_size)
+ -- do nothing
+end
+
function Recurrent:update(bp_err, input, output)
if (self.direct_update == true) then
local ltp_hh = self.ltp_hh.trans
diff --git a/nerv/layer/bias.lua b/nerv/layer/bias.lua
index c99274d..7e9fd46 100644
--- a/nerv/layer/bias.lua
+++ b/nerv/layer/bias.lua
@@ -18,6 +18,10 @@ function BiasLayer:init()
end
end
+function BiasLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function BiasLayer:propagate(input, output)
output[1]:copy_fromd(input[1])
output[1]:add_row(self.bias.trans, 1.0)
diff --git a/nerv/layer/combiner.lua b/nerv/layer/combiner.lua
index 7bd7617..1bcfdfb 100644
--- a/nerv/layer/combiner.lua
+++ b/nerv/layer/combiner.lua
@@ -30,6 +30,12 @@ function CombinerLayer:init(batch_size)
self.sum = self.gconf.cumat_type(batch_size, dim)
end
+function CombinerLayer:batch_resize(batch_size)
+ if self.sum:nrow() ~= batch_size then
+ self.sum = self.gconf.cumat_type(batch_size, self.dim_in[1])
+ end
+end
+
function CombinerLayer:update(bp_err, input, output)
end
diff --git a/nerv/layer/mse.lua b/nerv/layer/mse.lua
index 2516998..0ee3080 100644
--- a/nerv/layer/mse.lua
+++ b/nerv/layer/mse.lua
@@ -20,6 +20,14 @@ function MSELayer:init(batch_size)
self.diff = self.mse:create()
end
+function MSELayer:batch_resize(batch_size)
+ if self.mse:nrow() ~= batch_size then
+ self.mse = self.gconf.cumat_type(batch_size, self.dim_in[1])
+ self.mse_sum = self.gconf.cumat_type(batch_size, 1)
+ self.diff = self.mse:create()
+ end
+end
+
function MSELayer:update(bp_err, input, output)
-- no params, therefore do nothing
end
diff --git a/nerv/layer/sigmoid.lua b/nerv/layer/sigmoid.lua
index dfd09eb..0a8bcdc 100644
--- a/nerv/layer/sigmoid.lua
+++ b/nerv/layer/sigmoid.lua
@@ -14,6 +14,10 @@ function SigmoidLayer:init()
end
end
+function SigmoidLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function SigmoidLayer:update(bp_err, input, output)
-- no params, therefore do nothing
end
diff --git a/nerv/layer/softmax.lua b/nerv/layer/softmax.lua
index e979ebf..4205b66 100644
--- a/nerv/layer/softmax.lua
+++ b/nerv/layer/softmax.lua
@@ -14,6 +14,10 @@ function SoftmaxLayer:init(batch_size)
end
end
+function SoftmaxLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function SoftmaxLayer:update(bp_err, input, output)
-- no params, therefore do nothing
end
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
index f878a2f..9071e86 100644
--- a/nerv/layer/softmax_ce.lua
+++ b/nerv/layer/softmax_ce.lua
@@ -23,6 +23,13 @@ function SoftmaxCELayer:init(batch_size)
self.ce = self.softmax:create()
end
+function SoftmaxCELayer:batch_resize(batch_size)
+ if self.softmax:nrow() ~= batch_size then
+ self.softmax = self.gconf.cumat_type(batch_size, self.dim_in[1])
+ self.ce = self.softmax:create()
+ end
+end
+
function SoftmaxCELayer:update(bp_err, input, output)
-- no params, therefore do nothing
end
diff --git a/nerv/layer/window.lua b/nerv/layer/window.lua
index 4e9a3b1..8eed352 100644
--- a/nerv/layer/window.lua
+++ b/nerv/layer/window.lua
@@ -18,6 +18,10 @@ function WindowLayer:init()
end
end
+function WindowLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function WindowLayer:propagate(input, output)
output[1]:copy_fromd(input[1])
output[1]:scale_rows_by_row(self.window.trans)
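The resizable layers above (CombinerLayer, MSELayer, SoftmaxCELayer) all follow the same idiom: reallocate a buffer only when its current row count differs from the requested batch size, so repeated calls with an unchanged size cost nothing. A sketch of that template for a hypothetical layer holding one batch-sized buffer (the layer and field names are illustrative, not from this commit):

-- illustrative template following the pattern in this commit
function MyLayer:batch_resize(batch_size)
    if self.buf:nrow() ~= batch_size then
        -- cumat_type(nrow, ncol) allocates a fresh device matrix of the new size
        self.buf = self.gconf.cumat_type(batch_size, self.dim_in[1])
    end
end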