#!/bin/bash

# Copyright 2012-2013 Karel Vesely, Daniel Povey
# Apache 2.0

# Begin configuration section.
nnet=               # non-default location of DNN (optional)
feature_transform=  # non-default location of feature_transform (optional)
model=              # non-default location of transition model (optional)
class_frame_counts= # non-default location of PDF counts (optional)
srcdir=             # non-default location of DNN-dir (decouples model dir from decode dir)

stage=0 # stage=1 skips lattice generation
nj=4
cmd=run.pl

acwt=0.10 # note: only really affects pruning (scoring is on lattices).
beam=13.0
lattice_beam=8.0
min_active=200
max_active=7000 # limit of active tokens
max_mem=50000000 # approx. limit to memory consumption during minimization in bytes
nnet_forward_opts="--prior-scale=1.0"

skip_scoring=false
scoring_opts="--min-lmwt 4 --max-lmwt 15"

num_threads=1 # if >1, will use latgen-faster-parallel
parallel_opts= # Ignored now.
use_gpu="no" # yes|no|optionally
# End configuration section.

echo "$0 $@"  # Print the command line for logging

[ -f ./path.sh ] && . ./path.sh; # source the path.
. parse_options.sh || exit 1;

if [ $# != 4 ]; then
   echo "Usage: $0 [options] <graph-dir> <data-dir> <config> <decode-dir>"
   echo "... where <decode-dir> is assumed to be a sub-directory of the directory"
   echo " where the DNN and transition model are."
   echo "e.g.: $0 exp/dnn1/graph_tgpr data/test config.lua exp/dnn1/decode_tgpr"
   echo ""
   echo "This script works on plain or modified features (CMN, delta+delta-delta),"
   echo "which are then sent through feature-transform. It works out what type"
   echo "of features you used from the content of srcdir."
   echo ""
   echo "main options (for others, see top of script file)"
   echo "  --config <config-file>                           # config containing options"
   echo "  --nj <nj>                                        # number of parallel jobs"
   echo "  --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs."
   echo ""
   echo "  --srcdir <dir>                                   # non-default dir with DNN/models, can be different"
   echo "                                                   # from parent dir of <decode-dir> (opt.)"
   echo ""
   echo "  --acwt <float>                                   # select acoustic scale for decoding"
   echo "  --scoring-opts <opts>                            # options forwarded to local/score.sh"
   echo "  --num-threads <N>                                # N>1: run multi-threaded decoder"
   exit 1;
fi

graphdir=$1
data=$2
config=$3
dir=$4
[ -z "$srcdir" ] && srcdir=$(dirname $dir); # Default model directory one level up from decoding directory.
sdata=$data/split$nj;

mkdir -p $dir/log

[[ -d $sdata && $data/feats.scp -ot $sdata ]] || split_data.sh $data $nj || exit 1;
echo $nj > $dir/num_jobs

# Select default locations to model files (if not already set externally)
[ -z "$model" ] && model=$srcdir/final.mdl
#
[ -z "$class_frame_counts" -a -f $srcdir/prior_counts ] && class_frame_counts=$srcdir/prior_counts # priority,
[ -z "$class_frame_counts" ] && class_frame_counts=$srcdir/ali_train_pdf.counts

# Check that files exist
for f in $sdata/1/feats.scp $model $class_frame_counts $graphdir/HCLG.fst; do
  [ ! -f $f ] && echo "$0: missing file $f" && exit 1;
done

# Possibly use multi-threaded decoder
thread_string=
[ $num_threads -gt 1 ] && thread_string="-parallel --num-threads=$num_threads"

# PREPARE FEATURE EXTRACTION PIPELINE
# import config,
cmvn_opts=
delta_opts=
D=$srcdir
[ -e $D/norm_vars ] && cmvn_opts="--norm-means=true --norm-vars=$(cat $D/norm_vars)" # Bwd-compatibility,
[ -e $D/cmvn_opts ] && cmvn_opts=$(cat $D/cmvn_opts)
[ -e $D/delta_order ] && delta_opts="--delta-order=$(cat $D/delta_order)" # Bwd-compatibility,
[ -e $D/delta_opts ] && delta_opts=$(cat $D/delta_opts)
#
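# For orientation: with both CMVN and deltas enabled, the pipeline assembled
# below ends up looking roughly like this (values are illustrative, the real
# options come from the cmvn_opts/delta_opts files in $srcdir):
#   copy-feats scp:$sdata/JOB/feats.scp ark:- | \
#     apply-cmvn --norm-means=true --norm-vars=false --utt2spk=ark:$sdata/JOB/utt2spk scp:$sdata/JOB/cmvn.scp ark:- ark:- | \
#     add-deltas --delta-order=2 ark:- ark:- |
# The trailing '|' marks the rspecifier as a command pipeline (Kaldi table
# convention), so the forward pass reads the features these commands produce.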
# Create the feature stream,
feats="ark,s,cs:copy-feats scp:$sdata/JOB/feats.scp ark:- |"
# apply-cmvn (optional),
[ ! -z "$cmvn_opts" -a ! -f $sdata/1/cmvn.scp ] && echo "$0: Missing $sdata/1/cmvn.scp" && exit 1
[ ! -z "$cmvn_opts" ] && feats="$feats apply-cmvn $cmvn_opts --utt2spk=ark:$sdata/JOB/utt2spk scp:$sdata/JOB/cmvn.scp ark:- ark:- |"
# add-deltas (optional),
[ ! -z "$delta_opts" ] && feats="$feats add-deltas $delta_opts ark:- ark:- |"
#

# Run the decoding in the queue,
if [ $stage -le 0 ]; then
  $cmd --num-threads $((num_threads+1)) JOB=1:$nj $dir/log/decode.JOB.log \
    ./src/nnet-forward $nnet_forward_opts --class-frame-counts=$class_frame_counts --use-gpu=$use_gpu $config "$feats" ark:- \| \
    latgen-faster-mapped$thread_string --min-active=$min_active --max-active=$max_active --max-mem=$max_mem --beam=$beam \
    --lattice-beam=$lattice_beam --acoustic-scale=$acwt --allow-partial=true --word-symbol-table=$graphdir/words.txt \
    $model $graphdir/HCLG.fst ark:- "ark:|gzip -c > $dir/lat.JOB.gz" || exit 1;
fi

# Run the scoring
if ! $skip_scoring ; then
  [ ! -x local/score.sh ] && \
    echo "Not scoring because local/score.sh does not exist or is not executable." && exit 1;
  local/score.sh $scoring_opts --cmd "$cmd" $data $graphdir $dir || exit 1;
fi

exit 0;
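
# Example invocation (the script name and paths below are illustrative):
#   steps/decode_dnn.sh --nj 8 --acwt 0.10 --srcdir exp/dnn1 \
#     exp/dnn1/graph_tgpr data/test config.lua exp/dnn1/decode_tgpr
# Lattices are written to $dir/lat.*.gz; unless skip_scoring is set to true,
# scoring is delegated to local/score.sh with $scoring_opts.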