From 18b0e3d993ec5ce8e97a6affb533c9ace940bfff Mon Sep 17 00:00:00 2001
From: Determinant <ted.sybil@gmail.com>
Date: Tue, 1 Mar 2016 00:33:28 +0800
Subject: ...

---
 kaldi_decode/Makefile                         |   2 +-
 kaldi_decode/README.timit                     |   2 +-
 kaldi_io/tools/convert_from_kaldi_pretrain.sh |   4 +-
 tutorial/howto_pretrain_from_kaldi.rst        | 136 ++++++++++++++++++--------
 4 files changed, 99 insertions(+), 45 deletions(-)

diff --git a/kaldi_decode/Makefile b/kaldi_decode/Makefile
index e3a7c2d..2ccedfb 100644
--- a/kaldi_decode/Makefile
+++ b/kaldi_decode/Makefile
@@ -25,7 +25,7 @@ OBJ_SUBDIR := $(addprefix $(OBJ_DIR)/,$(SUBDIR))
 
 KL := $(KDIR)/src/feat/kaldi-feat.a $(KDIR)/src/cudamatrix/kaldi-cudamatrix.a $(KDIR)/src/matrix/kaldi-matrix.a $(KDIR)/src/base/kaldi-base.a $(KDIR)/src/util/kaldi-util.a $(KDIR)/src/hmm/kaldi-hmm.a $(KDIR)/src/tree/kaldi-tree.a $(KDIR)/src/nnet/kaldi-nnet.a $(BLAS_LDFLAGS)
 
-build: $(OBJ_DIR) $(LUA_DIR) $(OBJ_SUBDIR) $(OBJS)
+build: $(OBJ_DIR) $(OBJ_SUBDIR) $(OBJS)
 $(OBJ_DIR)/%.o: %.cc
 	g++ -c -o $@ $< -Wall $(KALDIINCLUDE) -DHAVE_ATLAS -DKALDI_DOUBLEPRECISION=0 -DHAVE_POSIX_MEMALIGN -DLUA_USE_APICHECK -I $(LUA_INCDIR) -I $(INC_PATH) $(CFLAGS)
 $(OBJ_DIR)/nnet-forward: $(OBJ_DIR)/src/nnet-forward.o
diff --git a/kaldi_decode/README.timit b/kaldi_decode/README.timit
index 7fac918..0a3e33a 100755
--- a/kaldi_decode/README.timit
+++ b/kaldi_decode/README.timit
@@ -4,7 +4,7 @@ source cmd.sh
 
 gmmdir=/speechlab/users/mfy43/timit/s5/exp/tri3/
 data_fmllr=/speechlab/users/mfy43/timit/s5/data-fmllr-tri3/
-dir=/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_prepare/
+dir=/speechlab/users/mfy43/timit/s5/exp/dnn4_nerv_dnn/
 nerv_config=/speechlab/users/mfy43/nerv/nerv/examples/timit_baseline2.lua
 decode=/speechlab/users/mfy43/nerv/install/bin/decode_with_nerv.sh
 
diff --git a/kaldi_io/tools/convert_from_kaldi_pretrain.sh b/kaldi_io/tools/convert_from_kaldi_pretrain.sh
index 78f532f..81fe840 100755
--- a/kaldi_io/tools/convert_from_kaldi_pretrain.sh
+++ b/kaldi_io/tools/convert_from_kaldi_pretrain.sh
@@ -17,12 +17,13 @@ dir=$6
 
 [[ -z $data_fmllr ]] && data_fmllr=data-fmllr-tri3
 [[ -z $alidir ]] && alidir=exp/tri3_ali
-[[ -z $dir ]] && dir=exp/dnn4_nerv_prepare
+[[ -z $dir ]] && dir=exp/dnn4_nerv_dnn
 [[ -z $data ]] && data=$data_fmllr/train_tr90
 [[ -z $data_cv ]] && data_cv=$data_fmllr/train_cv10
 kaldi_to_nerv=$nerv_kaldi/tools/kaldi_to_nerv
 mkdir $dir -p
 mkdir $dir/log -p
+
 ###### PREPARE DATASETS ######
 cp $data/feats.scp $dir/train_sorted.scp
 cp $data_cv/feats.scp $dir/cv.scp
@@ -44,6 +45,7 @@ nnet-initialize --binary=false $nnet_proto $nnet_init
 $kaldi_to_nerv $nnet_init $dir/nnet_output.nerv $hid_num
 $kaldi_to_nerv <(nnet-copy --binary=false $pretrain_dir/${hid_num}.dbn -) $dir/nnet_init.nerv
 $kaldi_to_nerv <(nnet-copy --binary=false $pretrain_dir/final.feature_transform -) $dir/nnet_trans.nerv
+
 ###### PREPARE FOR DECODING #####
 echo "Using PDF targets from dirs '$alidir' '$alidir_cv'"
 # training targets in posterior format,
diff --git a/tutorial/howto_pretrain_from_kaldi.rst b/tutorial/howto_pretrain_from_kaldi.rst
index ff6ef3d..6b8253a 100644
--- a/tutorial/howto_pretrain_from_kaldi.rst
+++ b/tutorial/howto_pretrain_from_kaldi.rst
@@ -7,38 +7,60 @@ How to Use a Pretrained nnet Model from Kaldi
            NERV finetune. Finally it shows two possible ways to decode the
            finetuned model in Kaldi framework.
 
-- Locate the egs/timit inside Kaldi trunk directory.
+- Note: in this tutorial, we use the following notation to denote directory prefixes:
 
-- Configure ``cmd.sh`` and ``path.sh`` according to your machine setting.
+  - ``<nerv_home>``: the path of NERV (the location of the outermost ``nerv`` directory)
 
-- Open the ``run.sh`` and locate the line saying ``exit 0 # From this point
-  you can run Karel's DNN: local/nnet/run_dnn.sh``. Uncomment this line. This
-  is because in this tutorial, we only want to train a basic tri-phone DNN,
-  so we simply don't do MMI training, system combination or fancy things like
-  these.
+  - ``<timit_home>``: the TIMIT working directory (the location of the ``timit/s5`` directory)
 
-- Run ``./run.sh`` to start the training stages. After that, we will get
+- Locate the ``egs/timit`` directory inside the Kaldi trunk directory.
+
+- Configure ``<timit_home>/cmd.sh`` and ``<timit_home>/path.sh`` according to your machine setting.
+
+- Open ``<timit_home>/run.sh`` and locate the line saying
+
+  ::
+
+    exit 0 # From this point you can run Karel's DNN: local/nnet/run_dnn.sh
+
+  Uncomment this line: in this tutorial we only want to train a basic
+  tri-phone DNN, so we simply skip MMI training, system combination, and
+  other fancy steps.
+
+- Run ``./run.sh`` (at ``<timit_home>``) to start the training stages. After that, we will have the
   tri-phone GMM-HMM trained and the aligned labels. Let's move forward to
   pretrain a DNN.
 
-- Open ``local/nnet/run_dnn.sh``, there are again several stages. Note that
-  the first stage is what we actually need (pretraining the DNN), since in
-  this tutorial we want to demonstrate how to get the pretrained model from
-  stage 1, replace stage 2 with NERV (finetune per-frame cross-entropy), and
-  decode using the finetuned network. However, here we add a line ``exit 0``
-  after stage 2 to preserve stage 2 in order to compare the NERV result
-  against the standard one (the decode result using finetuned model produced
-  by the original stage 2).
+- Open ``<timit_home>/local/nnet/run_dnn.sh``; there are again several stages.
+  Note that the first stage (pretraining the DNN) is what we actually need,
+  since in this tutorial we want to demonstrate how to take the pretrained
+  model from stage 1, replace stage 2 with NERV (per-frame cross-entropy
+  finetuning), and decode using the finetuned network. However, we add a line
+  ``exit 0`` after stage 2 to preserve stage 2, in order to compare the NERV
+  result against the standard one (the decoding result using the finetuned
+  model produced by the original stage 2).
 
-- Run ``local/nnet/run_dnn.sh`` (first two stages).
+- Run ``local/nnet/run_dnn.sh`` (at ``<timit_home>``, for the first two stages).
 - You'll find directory like ``dnn4_pretrain-dbn`` and
-  ``dnn4_pretrain-dbn_dnn`` inside the ``exp/``. They correspond to stage 1 and
-  stage 2 respectively. To use NERV to do stage 2 instead, we need the
-  pretrained network and the global transformation from stage 1:
+  ``dnn4_pretrain-dbn_dnn`` inside ``<timit_home>/exp/``. They correspond
+  to stage 1 and stage 2 respectively. To use NERV to do stage 2 instead, we
+  need the pretrained network and the global transformation from stage 1:
   
-  - Check the file ``exp/dnn4_pretrain-dbn/6.dbn`` exists. (pretrained network)
-  - Check the file ``exp/dnn4_pretrain-dbn/tr_splice5_cmvn-g.nnet`` exists. (global transformation)
-  - Run script from ``kaldi_io/tools/convert_from_kaldi_pretrain.sh`` to
+  - Check that the file ``<timit_home>/exp/dnn4_pretrain-dbn/6.dbn`` exists
+    (the pretrained network).
+
+  - Check that the file
+    ``<timit_home>/exp/dnn4_pretrain-dbn/tr_splice5_cmvn-g.nnet`` exists
+    (the global transformation).
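+
+    A quick way to verify both from the shell (using the notation above):
+
+    ::
+
+      ls <timit_home>/exp/dnn4_pretrain-dbn/6.dbn
+      ls <timit_home>/exp/dnn4_pretrain-dbn/tr_splice5_cmvn-g.nnet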
+
+  - Run the script ``<nerv_home>/speech/kaldi_io/tools/convert_from_kaldi_pretrain.sh`` to
     generate the parameters for the output layer and the script files for
     training and cross-validation set.
 
@@ -47,18 +69,33 @@ How to Use a Pretrained nnet Model from Kaldi
     example, ``affine0_ltp`` and ``bias0``. These names should correspond to
     the identifiers used in the declaration of the network. Luckily, this
     tutorial comes with a written network declaration at
-    ``nerv/examples/timit_baseline2.lua``.
+    ``<nerv_home>/nerv/examples/timit_baseline2.lua``.
+
+- Copy the file ``<nerv_home>/nerv/examples/timit_baseline2.lua`` to
+  ``<timit_home>/timit_mybaseline.lua``, and change the lines containing
+  ``/speechlab`` to match your own setup.
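+
+  For example (a sketch: ``/speechlab/users/mfy43`` is the prefix used in the
+  reference setup, and ``/your/own/prefix`` is a placeholder for yours):
+
+  ::
+
+    cp <nerv_home>/nerv/examples/timit_baseline2.lua <timit_home>/timit_mybaseline.lua
+    sed -i 's|/speechlab/users/mfy43|/your/own/prefix|g' <timit_home>/timit_mybaseline.lua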
 
-- Copy the file ``nerv/examples/timit_baseline2.lua`` to
-  ``timit_mybaseline.lua``, and change the line containing ``/speechlab`` to
-  your own setting.
+- Start the NERV training by
+
+  ::
+
+    <nerv_home>/install/bin/nerv <nerv_home>/nerv/examples/asr_trainer.lua timit_mybaseline.lua
 
-- Start the NERV training by ``install/bin/nerv nerv/examples/asr_trainer.lua timit_mybaseline.lua``.
+  (at ``<timit_home>``).
 
-  - ``install/bin/nerv`` is the program which sets up the NERV environment,
+  - ``<nerv_home>/install/bin/nerv`` is the program that sets up the NERV environment,
 
-  - followed by an argument ``nerv/examples/asr_trainer.lua`` which is the script
-    you actually want to run (the general DNN training scheduler),
+  - followed by an argument ``<nerv_home>/nerv/examples/asr_trainer.lua``,
+    which is the script you actually want to run (the general DNN training
+    scheduler),
 
   - followed by an argument ``timit_mybaseline.lua`` to the scheduler,
     specifying the network you want to train and some relevant settings, such
@@ -74,19 +111,25 @@ How to Use a Pretrained nnet Model from Kaldi
       global transformation chunk file once used in training. This part lets
       the decoder know about the set of parameters for decoding.
 
-    - Copy the script ``nerv/speech/kaldi_io/README.timit`` to your Kaldi
-      working directory (``timit/s5``) and modify the paths listed in the
-      script.
+    - Copy the script ``<nerv_home>/speech/kaldi_decode/README.timit`` to
+      ``<timit_home>`` and modify the paths listed in the script.
 
-    - Run the modified ``README.timit`` in ``s5`` directory (where there is the
-      ``path.sh``).
+    - Run the modified ``README.timit`` (at ``<timit_home>``).
 
-    - After decoding, run ``bash RESULT exp/dnn4_nerv`` to see the results.
+    - After decoding, run ``bash RESULT exp/dnn4_nerv_dnn`` to see the results.
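+
+      For example (a sketch, assuming the copy keeps the name ``README.timit``):
+
+      ::
+
+         cd <timit_home>
+         bash README.timit
+         bash RESULT exp/dnn4_nerv_dnn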
 
   - Plan B: In this plan, we manually convert the trained model back to Kaldi
     nnet format, and use Kaldi to decode.
 
-    - Create a copy of ``nerv/speech/kaldi_io/tools/nerv_to_kaldi.lua``.
+    - Create a copy of ``<nerv_home>/speech/kaldi_io/tools/nerv_to_kaldi.lua``.
 
     - Modify the list named ``lnames`` to list the name of layers you want to
       put into the output Kaldi parameter file in order. (You don't actually
@@ -103,14 +146,23 @@ How to Use a Pretrained nnet Model from Kaldi
       ::
      
          cat your_trained_params.nerv your_global_trans.nerv > all.nerv
-         install/bin/nerv nerv_to_kaldi.lua timit_mybaseline.lua all.nerv your_kaldi_output.nnet
+         <nerv_home>/install/bin/nerv nerv_to_kaldi.lua timit_mybaseline.lua all.nerv your_kaldi_output.nnet
 
-    - Finally, locate the directory of stage 2: ``exp/dnn4_pretrain-dbn_dnn``
-      and temporarily change the symbolic link for the final network file to the converted one:
+    - Finally, locate the stage 2 directory
+      ``<timit_home>/exp/dnn4_pretrain-dbn_dnn`` and temporarily change the
+      symbolic link for the final network file to the converted one:
 
       ::
         
-         cd exp/dnn4_pretrain-dbn_dnn
+         cd <timit_home>/exp/dnn4_pretrain-dbn_dnn
          mv final.nnet final.nnet.orig
          ln -sv your_kaldi_output.nnet final.nnet
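+
+      After comparing the decoding results, you can restore the original
+      network by reversing the step above:
+
+      ::
+
+         rm final.nnet
+         mv final.nnet.orig final.nnet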
 
-- 