diff options
Diffstat (limited to 'nerv/layer')
-rw-r--r-- | nerv/layer/init.lua | 2 |
-rw-r--r-- | nerv/layer/mmi.lua | 50 |
-rw-r--r-- | nerv/layer/mpe.lua | 52 |
3 files changed, 0 insertions, 104 deletions
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua index 25dfebb..6861b0e 100644 --- a/nerv/layer/init.lua +++ b/nerv/layer/init.lua @@ -79,5 +79,3 @@ nerv.include('mse.lua') nerv.include('combiner.lua') nerv.include('affine_recurrent.lua') nerv.include('softmax.lua') -nerv.include('mpe.lua') -nerv.include('mmi.lua') diff --git a/nerv/layer/mmi.lua b/nerv/layer/mmi.lua deleted file mode 100644 index ecc7f48..0000000 --- a/nerv/layer/mmi.lua +++ /dev/null @@ -1,50 +0,0 @@ -require 'libkaldiseq' -local MMILayer = nerv.class("nerv.MMILayer", "nerv.Layer") - -function MMILayer:__init(id, global_conf, layer_conf) - self.id = id - self.gconf = global_conf - self.dim_in = layer_conf.dim_in - self.dim_out = layer_conf.dim_out - self.arg = layer_conf.cmd.arg - self.mdl = layer_conf.cmd.mdl - self.lat = layer_conf.cmd.lat - self.ali = layer_conf.cmd.ali - self:check_dim_len(2, -1) -- two inputs: nn output and utt key -end - -function MMILayer:init(batch_size) - self.total_frames = 0 - self.kaldi_mmi = nerv.KaldiMMI(self.arg, self.mdl, self.lat, self.ali) - if self.kaldi_mmi == nil then - nerv.error("kaldi arguments is expected: %s %s %s %s", self.arg, - self.mdl, self.lat, self.ali) - end -end - -function MMILayer:batch_resize(batch_size) - -- do nothing -end - -function MMILayer:update(bp_err, input, output) - -- no params, therefore do nothing -end - -function MMILayer:propagate(input, output) - self.valid = false - self.valid = self.kaldi_mmi:check(input[1], input[2]) - return self.valid -end - -function MMILayer:back_propagate(bp_err, next_bp_err, input, output) - if self.valid ~= true then - nerv.error("kaldi sequence training back_propagate fail") - end - local mmat = input[1]:new_to_host() - next_bp_err[1]:copy_fromh(self.kaldi_mmi:calc_diff(mmat, input[2])) - self.total_frames = self.total_frames + self.kaldi_mmi:get_num_frames() -end - -function MMILayer:get_params() - return nerv.ParamRepo({}) -end diff --git a/nerv/layer/mpe.lua b/nerv/layer/mpe.lua deleted 
file mode 100644 index ec8a8f3..0000000 --- a/nerv/layer/mpe.lua +++ /dev/null @@ -1,52 +0,0 @@ -require 'libkaldiseq' -local MPELayer = nerv.class("nerv.MPELayer", "nerv.Layer") - -function MPELayer:__init(id, global_conf, layer_conf) - self.id = id - self.gconf = global_conf - self.dim_in = layer_conf.dim_in - self.dim_out = layer_conf.dim_out - self.arg = layer_conf.cmd.arg - self.mdl = layer_conf.cmd.mdl - self.lat = layer_conf.cmd.lat - self.ali = layer_conf.cmd.ali - self:check_dim_len(2, -1) -- two inputs: nn output and utt key -end - -function MPELayer:init(batch_size) - self.total_correct = 0 - self.total_frames = 0 - self.kaldi_mpe = nerv.KaldiMPE(self.arg, self.mdl, self.lat, self.ali) - if self.kaldi_mpe == nil then - nerv.error("kaldi arguments is expected: %s %s %s %s", self.arg, - self.mdl, self.lat, self.ali) - end -end - -function MPELayer:batch_resize(batch_size) - -- do nothing -end - -function MPELayer:update(bp_err, input, output) - -- no params, therefore do nothing -end - -function MPELayer:propagate(input, output) - self.valid = false - self.valid = self.kaldi_mpe:check(input[1], input[2]) - return self.valid -end - -function MPELayer:back_propagate(bp_err, next_bp_err, input, output) - if self.valid ~= true then - nerv.error("kaldi sequence training back_propagate fail") - end - local mmat = input[1]:new_to_host() - next_bp_err[1]:copy_fromh(self.kaldi_mpe:calc_diff(mmat, input[2])) - self.total_frames = self.total_frames + self.kaldi_mpe:get_num_frames() - self.total_correct = self.total_correct + self.kaldi_mpe:get_utt_frame_acc() -end - -function MPELayer:get_params() - return nerv.ParamRepo({}) -end |