## DNN hybrid system training parameters
dnn_num_hidden_layers=3
dnn_input_dim=2000
dnn_output_dim=200
dnn_init_learning_rate=0.008
dnn_final_learning_rate=0.0008
dnn_mixup=5000
num_epochs=15
num_epochs_extra=5
num_iters_final=20
babel_type=limited

# Supervised tuning options
# If true, update only the last layer, using only the supervised data,
# after semi-supervised training is done.
do_supervised_tuning=true
dnn_update_cpu_parallel_opts=(--minibatch-size 128 --num-jobs-nnet 8 --num-threads 16 \
  --parallel-opts "-pe smp 16" --cmd "queue.pl -l arch=*64 -l mem_free=2G,ram_free=1G")
dnn_update_gpu_parallel_opts=(--minibatch-size 512 --num-jobs-nnet 4 --num-threads 1 \
  --parallel-opts "-l gpu=1" --cmd "queue.pl -l arch=*64 -l mem_free=2G,ram_free=1G")

# Semi-supervised examples options
egs_gpu_opts=(--splice-width 4 --samples-per-iter 200000 --num-jobs-nnet 4 --cmd "queue.pl -l arch=*64 -l mem_free=2G,ram_free=1G")
egs_cpu_opts=(--splice-width 4 --samples-per-iter 200000 --num-jobs-nnet 8 --cmd "queue.pl -l arch=*64 -l mem_free=2G,ram_free=1G")
egs_io_opts="-tc 5"
weight_threshold=0.7
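
# The arrays above are meant to be expanded with "${name[@]}" when the
# corresponding steps/nnet2 scripts are invoked by the run scripts. A minimal
# sketch of that expansion follows, kept commented out so this file stays safe
# to source; the use_gpu switch, the directory names, and the choice of
# steps/nnet2/get_egs.sh as the consumer are assumptions for illustration,
# not part of this config.
#
#   if $use_gpu; then
#     egs_opts=("${egs_gpu_opts[@]}")   # example dump split for the 4-job GPU setup
#   else
#     egs_opts=("${egs_cpu_opts[@]}")   # example dump split for the 8-job CPU setup
#   fi
#   steps/nnet2/get_egs.sh "${egs_opts[@]}" --io-opts "$egs_io_opts" \
#     data/train data/lang exp/tri5_ali exp/tri6_nnet/egs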