author    Yimmon Zhuang <yimmon.zhuang@gmail.com>  2015-10-10 22:32:51 +0800
committer Yimmon Zhuang <yimmon.zhuang@gmail.com>  2015-10-10 22:32:51 +0800
commit    473eb9c082224be19f147697ba951ae5bac4b4b4 (patch)
tree      6b3b1866fcac3748b334c5b6dbae8d83b0ccf28f
parent    60e7718f4db981557ac48d9d375d8e23b1cd39d1 (diff)
move sequence-related layers to kaldi_seq
 nerv/Makefile                    |  1 -
 nerv/examples/mmi_chime3.lua     |  1 +
 nerv/examples/mpe_chime3.lua     |  1 +
 nerv/layer/init.lua              |  2 --
 nerv/layer/mmi.lua               | 50 ----------
 nerv/layer/mpe.lua               | 52 ----------
 nerv/lib/matrix/generic/matrix.c |  2 +-
 7 files changed, 3 insertions(+), 106 deletions(-)
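
This commit moves the MMI and MPE sequence-training layers out of the nerv core: they are dropped from the Makefile's LUA_LIBS and from layer/init.lua, and their sources are deleted, while the two CHiME3 example scripts now load them from the separate kaldi_seq plugin. After the move, a training script pulls the layers in as sketched below. The layer_repo declaration is a minimal sketch following nerv's usual configuration shape; the cmd fields (arg, mdl, lat, ali) come from the deleted layer code, but the concrete paths, dimensions, layer id, and the arg value are placeholders, not taken from this commit.

    require 'kaldi_io'
    require 'kaldi_seq'  -- now the home of nerv.MMILayer / nerv.MPELayer

    -- sketch: declaring an MPE criterion layer after the move
    -- (placeholder paths/dims; two inputs: nn output and utterance key)
    local layer_repo = nerv.LayerRepo(
        {
            ["nerv.MPELayer"] =
            {
                mpe_crit = {{}, {dim_in = {440, 1}, dim_out = {},
                                 cmd = {arg = "--acoustic-scale=0.1",
                                        mdl = "final.mdl",
                                        lat = "ark:lat.ark",
                                        ali = "ark:ali.ark"}}}
            }
        }, param_repo, gconf)
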
diff --git a/nerv/Makefile b/nerv/Makefile
index ce178a0..b449f82 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -32,7 +32,6 @@ LIBS := $(INST_LIBDIR)/libnerv.so $(LIB_PATH)/libnervcore.so $(LIB_PATH)/libluaT
LUA_LIBS := matrix/init.lua io/init.lua init.lua \
layer/init.lua layer/affine.lua layer/sigmoid.lua layer/softmax_ce.lua layer/softmax.lua \
layer/window.lua layer/bias.lua layer/combiner.lua layer/mse.lua layer/affine_recurrent.lua \
- layer/mpe.lua layer/mmi.lua \
nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/layer_dag.lua \
io/sgd_buffer.lua
diff --git a/nerv/examples/mmi_chime3.lua b/nerv/examples/mmi_chime3.lua
index a7ad268..6ac7f28 100644
--- a/nerv/examples/mmi_chime3.lua
+++ b/nerv/examples/mmi_chime3.lua
@@ -1,4 +1,5 @@
require 'kaldi_io'
+require 'kaldi_seq'
gconf = {lrate = 0.00001, wcost = 0, momentum = 0.0,
cumat_type = nerv.CuMatrixFloat,
mmat_type = nerv.MMatrixFloat,
diff --git a/nerv/examples/mpe_chime3.lua b/nerv/examples/mpe_chime3.lua
index be723ca..ec095b0 100644
--- a/nerv/examples/mpe_chime3.lua
+++ b/nerv/examples/mpe_chime3.lua
@@ -1,4 +1,5 @@
require 'kaldi_io'
+require 'kaldi_seq'
gconf = {lrate = 0.00001, wcost = 0, momentum = 0.0,
cumat_type = nerv.CuMatrixFloat,
mmat_type = nerv.MMatrixFloat,
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 25dfebb..6861b0e 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -79,5 +79,3 @@ nerv.include('mse.lua')
nerv.include('combiner.lua')
nerv.include('affine_recurrent.lua')
nerv.include('softmax.lua')
-nerv.include('mpe.lua')
-nerv.include('mmi.lua')
diff --git a/nerv/layer/mmi.lua b/nerv/layer/mmi.lua
deleted file mode 100644
index ecc7f48..0000000
--- a/nerv/layer/mmi.lua
+++ /dev/null
@@ -1,50 +0,0 @@
-require 'libkaldiseq'
-local MMILayer = nerv.class("nerv.MMILayer", "nerv.Layer")
-
-function MMILayer:__init(id, global_conf, layer_conf)
- self.id = id
- self.gconf = global_conf
- self.dim_in = layer_conf.dim_in
- self.dim_out = layer_conf.dim_out
- self.arg = layer_conf.cmd.arg
- self.mdl = layer_conf.cmd.mdl
- self.lat = layer_conf.cmd.lat
- self.ali = layer_conf.cmd.ali
- self:check_dim_len(2, -1) -- two inputs: nn output and utt key
-end
-
-function MMILayer:init(batch_size)
- self.total_frames = 0
- self.kaldi_mmi = nerv.KaldiMMI(self.arg, self.mdl, self.lat, self.ali)
- if self.kaldi_mmi == nil then
- nerv.error("kaldi arguments is expected: %s %s %s %s", self.arg,
- self.mdl, self.lat, self.ali)
- end
-end
-
-function MMILayer:batch_resize(batch_size)
- -- do nothing
-end
-
-function MMILayer:update(bp_err, input, output)
- -- no params, therefore do nothing
-end
-
-function MMILayer:propagate(input, output)
- self.valid = false
- self.valid = self.kaldi_mmi:check(input[1], input[2])
- return self.valid
-end
-
-function MMILayer:back_propagate(bp_err, next_bp_err, input, output)
- if self.valid ~= true then
- nerv.error("kaldi sequence training back_propagate fail")
- end
- local mmat = input[1]:new_to_host()
- next_bp_err[1]:copy_fromh(self.kaldi_mmi:calc_diff(mmat, input[2]))
- self.total_frames = self.total_frames + self.kaldi_mmi:get_num_frames()
-end
-
-function MMILayer:get_params()
- return nerv.ParamRepo({})
-end
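
For reference, the removed MMILayer (now in kaldi_seq) wires Kaldi's lattice-based MMI criterion into a nerv network: it holds no parameters, propagate merely checks the minibatch against the utterance key, and back_propagate copies the error signal computed by KaldiMMI back onto the device. A driver step over such a layer would look roughly like this sketch, where mmi_layer, nn_out, utt_key, and err are placeholder names:

    -- hypothetical driver step; propagate returns whether the utterance
    -- was accepted, back_propagate then fills err with the MMI gradient
    local valid = mmi_layer:propagate({nn_out, utt_key}, {})
    if valid then
        mmi_layer:back_propagate(nil, {err}, {nn_out, utt_key}, {})
        nerv.printf("frames so far: %d\n", mmi_layer.total_frames)
    end
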
diff --git a/nerv/layer/mpe.lua b/nerv/layer/mpe.lua
deleted file mode 100644
index ec8a8f3..0000000
--- a/nerv/layer/mpe.lua
+++ /dev/null
@@ -1,52 +0,0 @@
-require 'libkaldiseq'
-local MPELayer = nerv.class("nerv.MPELayer", "nerv.Layer")
-
-function MPELayer:__init(id, global_conf, layer_conf)
- self.id = id
- self.gconf = global_conf
- self.dim_in = layer_conf.dim_in
- self.dim_out = layer_conf.dim_out
- self.arg = layer_conf.cmd.arg
- self.mdl = layer_conf.cmd.mdl
- self.lat = layer_conf.cmd.lat
- self.ali = layer_conf.cmd.ali
- self:check_dim_len(2, -1) -- two inputs: nn output and utt key
-end
-
-function MPELayer:init(batch_size)
- self.total_correct = 0
- self.total_frames = 0
- self.kaldi_mpe = nerv.KaldiMPE(self.arg, self.mdl, self.lat, self.ali)
- if self.kaldi_mpe == nil then
- nerv.error("kaldi arguments is expected: %s %s %s %s", self.arg,
- self.mdl, self.lat, self.ali)
- end
-end
-
-function MPELayer:batch_resize(batch_size)
- -- do nothing
-end
-
-function MPELayer:update(bp_err, input, output)
- -- no params, therefore do nothing
-end
-
-function MPELayer:propagate(input, output)
- self.valid = false
- self.valid = self.kaldi_mpe:check(input[1], input[2])
- return self.valid
-end
-
-function MPELayer:back_propagate(bp_err, next_bp_err, input, output)
- if self.valid ~= true then
- nerv.error("kaldi sequence training back_propagate fail")
- end
- local mmat = input[1]:new_to_host()
- next_bp_err[1]:copy_fromh(self.kaldi_mpe:calc_diff(mmat, input[2]))
- self.total_frames = self.total_frames + self.kaldi_mpe:get_num_frames()
- self.total_correct = self.total_correct + self.kaldi_mpe:get_utt_frame_acc()
-end
-
-function MPELayer:get_params()
- return nerv.ParamRepo({})
-end
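
MPELayer is identical in structure; the only difference is that KaldiMPE also reports a per-utterance frame accuracy, accumulated in total_correct. A training script could summarize it as below (mpe_layer is again a placeholder handle):

    -- sketch: reporting MPE frame accuracy at the end of an epoch;
    -- the zero check guards against an epoch with no accepted frames
    if mpe_layer.total_frames > 0 then
        local acc = mpe_layer.total_correct / mpe_layer.total_frames
        nerv.printf("MPE frame accuracy: %.3f%%\n", acc * 100)
    end
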
diff --git a/nerv/lib/matrix/generic/matrix.c b/nerv/lib/matrix/generic/matrix.c
index 4319e13..4246751 100644
--- a/nerv/lib/matrix/generic/matrix.c
+++ b/nerv/lib/matrix/generic/matrix.c
@@ -4,7 +4,7 @@
/* FIXME: malloc failure detection */
void nerv_matrix_(data_free)(Matrix *self, Status *status) {
- if(*self->data_ref == 0) return;
+ if(*self->data_ref == 0) return; /* FIXME: repeated free (double free) */
assert(*self->data_ref > 0);
if (--(*self->data_ref) == 0)
{
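
The matrix.c change only annotates an existing early return: data_free decrements a reference count shared by every matrix that aliases one data block and releases the block when the count hits zero, so a count that is already zero means the block was freed before (hence the FIXME). A toy Lua model of that logic, purely for illustration and not part of the nerv API:

    -- data_ref is a one-element table standing in for the shared counter
    local function data_free(self)
        if self.data_ref[1] == 0 then return end -- already freed: double free
        assert(self.data_ref[1] > 0)
        self.data_ref[1] = self.data_ref[1] - 1
        if self.data_ref[1] == 0 then
            self.data = nil -- last reference gone: release the block
        end
    end
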