%Aigaion2 BibTeX export from Idiap Publications
%Friday 03 May 2024 01:32:58 AM

@INPROCEEDINGS{Ramet_SLT_2018,
         author = {Ramet, Gaetan and Garner, Philip N. and Baeriswyl, Michael and Lazaridis, Alexandros},
       projects = {SUMMA},
          month = dec,
          title = {Context-Aware Attention Mechanism for Speech Emotion Recognition},
      booktitle = {IEEE Spoken Language Technology Workshop (SLT)},
           year = {2018},
          pages = {126--131},
       location = {Athens, Greece},
           isbn = {978-1-5386-4333-4},
            url = {http://www.slt2018.org/},
       abstract = {In this work, we study the use of attention mechanisms to enhance the performance of state-of-the-art deep learning models in Speech Emotion Recognition (SER). We introduce a new Long Short-Term Memory (LSTM)-based neural network attention model which is able to take into account the temporal information in speech during the computation of the attention vector. The proposed LSTM-based model is evaluated on the IEMOCAP dataset using a 5-fold cross-validation scheme and achieves 68.8\% weighted accuracy on 4 classes, outperforming state-of-the-art models.},
            pdf = {https://publications.idiap.ch/attachments/papers/2018/Ramet_SLT_2018.pdf}
}
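
% The abstract describes attention pooling over an LSTM encoder in which the attention
% weights are computed with temporal context rather than frame by frame. A minimal,
% hypothetical PyTorch sketch of that idea follows; the class name, layer sizes, and
% feature dimensions are illustrative assumptions, not the authors' implementation.

# Sketch only: LSTM-based attention pooling for utterance-level emotion classification.
# Hyperparameters and feature choices below are assumptions, not values from the paper.
import torch
import torch.nn as nn

class LSTMAttentionSER(nn.Module):
    def __init__(self, feat_dim=40, hidden_dim=128, num_classes=4):
        super().__init__()
        # Bidirectional LSTM encodes the acoustic feature sequence.
        self.encoder = nn.LSTM(feat_dim, hidden_dim, batch_first=True, bidirectional=True)
        # A second LSTM computes the attention scores, so the attention weights
        # themselves depend on temporal context.
        self.att_lstm = nn.LSTM(2 * hidden_dim, hidden_dim, batch_first=True)
        self.att_proj = nn.Linear(hidden_dim, 1)
        self.classifier = nn.Linear(2 * hidden_dim, num_classes)

    def forward(self, x):
        # x: (batch, time, feat_dim) frame-level features, e.g. log-Mel filterbanks.
        h, _ = self.encoder(x)                               # (batch, time, 2*hidden_dim)
        a, _ = self.att_lstm(h)                               # (batch, time, hidden_dim)
        weights = torch.softmax(self.att_proj(a).squeeze(-1), dim=1)  # (batch, time)
        context = (weights.unsqueeze(-1) * h).sum(dim=1)       # weighted pooling over time
        return self.classifier(context)                        # logits over emotion classes

# Example: logits = LSTMAttentionSER()(torch.randn(8, 300, 40))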