%Aigaion2 BibTeX export from Idiap Publications
%Monday 07 October 2024 07:04:56 PM

@INPROCEEDINGS{Juan_SLT2023-2_2023,
         author = {Zuluaga-Gomez, Juan and Prasad, Amrutha and Nigmatulina, Iuliia and Sarfjoo, Seyyed Saeed and Motlicek, Petr and Kleinert, Matthias and Helmke, Hartmut and Ohneiser, Oliver and Zhan, Qingran},
       keywords = {air traffic control communications, Automatic Speech Recognition, self-supervised pre-training, wav2vec 2.0},
       projects = {Idiap, HAAWAII, EC H2020- ATCO2},
          month = jan,
          title = {How Does Pre-trained Wav2Vec 2.0 Perform on Domain-Shifted ASR? An Extensive Benchmark on Air Traffic Control Communications},
      booktitle = {2023 IEEE Spoken Language Technology Workshop (SLT)},
         series = {1},
         volume = {1},
         number = {1},
           year = {2023},
   organization = {IEEE},
            url = {https://arxiv.org/abs/2203.16822},
        abstract = {Recent work on self-supervised pre-training focuses on leveraging large-scale unlabeled speech data to build robust end-to-end (E2E) acoustic models (AM) that can later be fine-tuned on downstream tasks, e.g., automatic speech recognition (ASR). Yet, few works have investigated the impact on performance when the data properties substantially differ between the pre-training and fine-tuning phases, termed domain shift. We target this scenario by analyzing the robustness of Wav2Vec 2.0 and XLS-R models on downstream ASR for a completely unseen domain, air traffic control (ATC) communications. We benchmark these two models on several open-source and challenging ATC databases with signal-to-noise ratios between 5 and 20 dB. Relative word error rate (WER) reductions between 20\% and 40\% are obtained in comparison to hybrid-based ASR baselines by only fine-tuning E2E acoustic models with a smaller fraction of labeled data. We also analyze WERs in the low-resource scenario and the gender bias carried by one ATC dataset.},
            pdf = {https://publications.idiap.ch/attachments/papers/2022/Juan_SLT2023-2_2023.pdf}
}