%Aigaion2 BibTeX export from Idiap Publications
%Saturday 21 December 2024 06:15:34 PM

@INPROCEEDINGS{VILLATORO-TELLO_ICASSP2023-2_2023,
         author = {Villatoro-Tello, Esa{\'{u}} and Madikeri, Srikanth and Zuluaga-Gomez, Juan and Sharma, Bidisha and Sarfjoo, Seyyed Saeed and Nigmatulina, Iuliia and Motlicek, Petr and Ivanov, Alexei V. and Ganapathiraju, Aravind},
       keywords = {Cross-modal Attention, Human-Computer Interaction, Speech Recognition, Spoken Language Understanding, Word Consensus Networks},
       projects = {Idiap, UNIPHORE},
          month = mar,
          title = {Effectiveness of Text, Acoustic, and Lattice-based Representations in Spoken Language Understanding Tasks},
      booktitle = {Proceedings of the 2023 IEEE International Conference on Acoustics, Speech and Signal Processing},
           year = {2023},
       abstract = {In this paper, we perform an exhaustive evaluation of different representations to address the intent classification problem in a Spoken Language Understanding (SLU) setup. We benchmark three types of systems for the SLU intent detection task: 1) text-based, 2) lattice-based, and a novel 3) multimodal approach. Our work provides a comprehensive analysis of the achievable performance of different state-of-the-art SLU systems under different circumstances, e.g., automatically- vs. manually-generated transcripts. We evaluate the systems on the publicly available SLURP spoken language resource corpus. Our results indicate that using richer forms of Automatic Speech Recognition (ASR) outputs, namely word consensus networks, allows the SLU system to improve over the 1-best setup (5.5\% relative improvement). However, crossmodal approaches, i.e., learning from acoustic and text embeddings, obtain performance similar to the oracle setup, a relative improvement of 17.8\% over the 1-best configuration, making them a recommended alternative to overcome the limitations of working with automatically generated transcripts.},
            pdf = {https://publications.idiap.ch/attachments/papers/2023/VILLATORO-TELLO_ICASSP2023-2_2023.pdf}
}