%Aigaion2 BibTeX export from Idiap Publications
%Saturday 21 December 2024 04:50:12 PM
@inproceedings{Tong_INTERSPEECH_2017,
  author    = {Tong, Sibo and Garner, Philip N. and Bourlard, Herv{\'e}},
  title     = {An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation},
  booktitle = {Proc. of Interspeech},
  year      = {2017},
  month     = aug,
  keywords  = {adaptive training, dnn, multilingual ASR},
  projects  = {Idiap, SUMMA},
  abstract  = {Different training and adaptation techniques for multilingual Automatic Speech Recognition (ASR) are explored in the context of hybrid systems, exploiting Deep Neural Networks (DNN) and Hidden Markov Models (HMM). In multilingual DNN training, the hidden layers (possibly extracting bottleneck features) are usually shared across languages, and the output layer can either model multiple sets of language-specific senones or one single universal IPA-based multilingual senone set. Both architectures are investigated, exploiting and comparing different language adaptive training (LAT) techniques originating from successful DNN-based speaker-adaptation. More specifically, speaker adaptive training methods such as Cluster Adaptive Training (CAT) and Learning Hidden Unit Contribution (LHUC) are considered. In addition, a language adaptive output architecture for IPA-based universal DNN is also studied and tested. Experiments show that LAT improves the performance and adaptation on the top layer further improves the accuracy. By combining state-level minimum Bayes risk (sMBR) sequence training with LAT, we show that a language adaptively trained IPA-based universal DNN outperforms a monolingually sequence trained model.},
  pdf       = {https://publications.idiap.ch/attachments/papers/2017/Tong_INTERSPEECH_2017.pdf},
}