Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/affine.lua              4
-rw-r--r--  nerv/layer/affine_recurrent.lua    4
-rw-r--r--  nerv/layer/bias.lua                4
-rw-r--r--  nerv/layer/combiner.lua            6
-rw-r--r--  nerv/layer/init.lua                1
-rw-r--r--  nerv/layer/mpe.lua                52
-rw-r--r--  nerv/layer/mse.lua                 8
-rw-r--r--  nerv/layer/sigmoid.lua             4
-rw-r--r--  nerv/layer/softmax.lua             4
-rw-r--r--  nerv/layer/softmax_ce.lua          7
-rw-r--r--  nerv/layer/window.lua              4
11 files changed, 98 insertions, 0 deletions
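
The common thread in the hunks below is a new batch_resize(batch_size) hook added to every layer. A minimal sketch of the contract, inferred from the diffs that follow (SomeLayer and self.buf are illustrative placeholders, not part of the commit): stateless layers implement the hook as a no-op, while layers that own per-batch buffers reallocate them only when the requested batch size actually changes.

-- Sketch only; mirrors the pattern used by CombinerLayer below.
function SomeLayer:batch_resize(batch_size)
    if self.buf:nrow() ~= batch_size then
        self.buf = self.gconf.cumat_type(batch_size, self.dim_in[1])
    end
end
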
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
index 00cbcfb..6c90e3e 100644
--- a/nerv/layer/affine.lua
+++ b/nerv/layer/affine.lua
@@ -60,6 +60,10 @@ function AffineLayer:init(batch_size)
self.bp:train_init()
end
+function AffineLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function AffineLayer:update(bp_err, input, output)
if self.direct_update then
self.ltp.correction:mul(input[1], bp_err[1], 1.0, gconf.momentum, 'T', 'N')
diff --git a/nerv/layer/affine_recurrent.lua b/nerv/layer/affine_recurrent.lua
index 59d259c..92d98e2 100644
--- a/nerv/layer/affine_recurrent.lua
+++ b/nerv/layer/affine_recurrent.lua
@@ -37,6 +37,10 @@ function Recurrent:init(batch_size)
self.bp:train_init()
end
+function Recurrent:batch_resize(batch_size)
+ -- do nothing
+end
+
function Recurrent:update(bp_err, input, output)
if (self.direct_update == true) then
local ltp_hh = self.ltp_hh.trans
diff --git a/nerv/layer/bias.lua b/nerv/layer/bias.lua
index c99274d..7e9fd46 100644
--- a/nerv/layer/bias.lua
+++ b/nerv/layer/bias.lua
@@ -18,6 +18,10 @@ function BiasLayer:init()
end
end
+function BiasLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function BiasLayer:propagate(input, output)
output[1]:copy_fromd(input[1])
output[1]:add_row(self.bias.trans, 1.0)
diff --git a/nerv/layer/combiner.lua b/nerv/layer/combiner.lua
index 7bd7617..1bcfdfb 100644
--- a/nerv/layer/combiner.lua
+++ b/nerv/layer/combiner.lua
@@ -30,6 +30,12 @@ function CombinerLayer:init(batch_size)
self.sum = self.gconf.cumat_type(batch_size, dim)
end
+function CombinerLayer:batch_resize(batch_size)
+ if self.sum:nrow() ~= batch_size then
+ self.sum = self.gconf.cumat_type(batch_size, self.dim_in[1])
+ end
+end
+
function CombinerLayer:update(bp_err, input, output)
end
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 6861b0e..b74422f 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -79,3 +79,4 @@ nerv.include('mse.lua')
nerv.include('combiner.lua')
nerv.include('affine_recurrent.lua')
nerv.include('softmax.lua')
+nerv.include('mpe.lua')
diff --git a/nerv/layer/mpe.lua b/nerv/layer/mpe.lua
new file mode 100644
index 0000000..ec8a8f3
--- /dev/null
+++ b/nerv/layer/mpe.lua
@@ -0,0 +1,52 @@
+require 'libkaldiseq'
+local MPELayer = nerv.class("nerv.MPELayer", "nerv.Layer")
+
+function MPELayer:__init(id, global_conf, layer_conf)
+ self.id = id
+ self.gconf = global_conf
+ self.dim_in = layer_conf.dim_in
+ self.dim_out = layer_conf.dim_out
+ self.arg = layer_conf.cmd.arg
+ self.mdl = layer_conf.cmd.mdl
+ self.lat = layer_conf.cmd.lat
+ self.ali = layer_conf.cmd.ali
+ self:check_dim_len(2, -1) -- two inputs: nn output and utt key
+end
+
+function MPELayer:init(batch_size)
+ self.total_correct = 0
+ self.total_frames = 0
+ self.kaldi_mpe = nerv.KaldiMPE(self.arg, self.mdl, self.lat, self.ali)
+ if self.kaldi_mpe == nil then
+ nerv.error("kaldi arguments is expected: %s %s %s %s", self.arg,
+ self.mdl, self.lat, self.ali)
+ end
+end
+
+function MPELayer:batch_resize(batch_size)
+ -- do nothing
+end
+
+function MPELayer:update(bp_err, input, output)
+ -- no params, therefore do nothing
+end
+
+function MPELayer:propagate(input, output)
+ self.valid = false
+ self.valid = self.kaldi_mpe:check(input[1], input[2])
+ return self.valid
+end
+
+function MPELayer:back_propagate(bp_err, next_bp_err, input, output)
+ if self.valid ~= true then
+ nerv.error("kaldi sequence training back_propagate fail")
+ end
+ local mmat = input[1]:new_to_host()
+ next_bp_err[1]:copy_fromh(self.kaldi_mpe:calc_diff(mmat, input[2]))
+ self.total_frames = self.total_frames + self.kaldi_mpe:get_num_frames()
+ self.total_correct = self.total_correct + self.kaldi_mpe:get_utt_frame_acc()
+end
+
+function MPELayer:get_params()
+ return nerv.ParamRepo({})
+end
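
MPELayer pulls its Kaldi command strings from layer_conf.cmd and validates them in init by constructing a nerv.KaldiMPE instance. A hypothetical way of wiring it up, assuming the usual nerv layer_conf shape; only the constructor signature and the field names (dim_in, dim_out, cmd.arg/mdl/lat/ali) come from the diff, every concrete value below is a placeholder.

-- Hypothetical example; the dimensions and Kaldi arguments are made up.
local mpe_crit = nerv.MPELayer("mpe_crit", gconf,
    {
        dim_in = {3001, 1},                -- nn output and the utterance key
        dim_out = {},
        cmd = {
            arg = "--acoustic-scale=0.1",  -- placeholder Kaldi options
            mdl = "final.mdl",
            lat = "ark:lat.ark",
            ali = "ark:ali.ark"
        }
    })
mpe_crit:init(batch_size)                  -- builds the nerv.KaldiMPE instance
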
diff --git a/nerv/layer/mse.lua b/nerv/layer/mse.lua
index 2516998..0ee3080 100644
--- a/nerv/layer/mse.lua
+++ b/nerv/layer/mse.lua
@@ -20,6 +20,14 @@ function MSELayer:init(batch_size)
self.diff = self.mse:create()
end
+function MSELayer:batch_resize(batch_size)
+ if self.mse:nrow() ~= batch_size then
+ self.mse = self.gconf.cumat_type(batch_size, self.dim_in[1])
+ self.mse_sum = self.gconf.cumat_type(batch_size, 1)
+ self.diff = self.mse:create()
+ end
+end
+
function MSELayer:update(bp_err, input, output)
-- no params, therefore do nothing
end
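
The code that invokes the new hook lives outside nerv/layer and is not part of this diff; a plausible caller would look like the sketch below (all names are assumed), resizing every layer before feeding a mini-batch whose size differs from the one used at init time, such as the last, shorter batch.

-- Hypothetical caller, not part of this commit.
for _, layer in ipairs(layers) do   -- 'layers' is an assumed list of layer objects
    layer:batch_resize(cur_batch_size)
end
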
diff --git a/nerv/layer/sigmoid.lua b/nerv/layer/sigmoid.lua
index dfd09eb..0a8bcdc 100644
--- a/nerv/layer/sigmoid.lua
+++ b/nerv/layer/sigmoid.lua
@@ -14,6 +14,10 @@ function SigmoidLayer:init()
end
end
+function SigmoidLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function SigmoidLayer:update(bp_err, input, output)
-- no params, therefore do nothing
end
diff --git a/nerv/layer/softmax.lua b/nerv/layer/softmax.lua
index e979ebf..4205b66 100644
--- a/nerv/layer/softmax.lua
+++ b/nerv/layer/softmax.lua
@@ -14,6 +14,10 @@ function SoftmaxLayer:init(batch_size)
end
end
+function SoftmaxLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function SoftmaxLayer:update(bp_err, input, output)
-- no params, therefore do nothing
end
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
index f878a2f..9071e86 100644
--- a/nerv/layer/softmax_ce.lua
+++ b/nerv/layer/softmax_ce.lua
@@ -23,6 +23,13 @@ function SoftmaxCELayer:init(batch_size)
self.ce = self.softmax:create()
end
+function SoftmaxCELayer:batch_resize(batch_size)
+ if self.softmax:nrow() ~= batch_size then
+ self.softmax = self.gconf.cumat_type(batch_size, self.dim_in[1])
+ self.ce = self.softmax:create()
+ end
+end
+
function SoftmaxCELayer:update(bp_err, input, output)
-- no params, therefore do nothing
end
diff --git a/nerv/layer/window.lua b/nerv/layer/window.lua
index 4e9a3b1..8eed352 100644
--- a/nerv/layer/window.lua
+++ b/nerv/layer/window.lua
@@ -18,6 +18,10 @@ function WindowLayer:init()
end
end
+function WindowLayer:batch_resize(batch_size)
+ -- do nothing
+end
+
function WindowLayer:propagate(input, output)
output[1]:copy_fromd(input[1])
output[1]:scale_rows_by_row(self.window.trans)