%Aigaion2 BibTeX export from Idiap Publications
%Thursday 21 November 2024 11:39:25 AM

@inproceedings{Oertel_ICMI_2015,
         author = {Oertel, Catharine and Funes Mora, Kenneth Alberto and Gustafson, Joakim and Odobez, Jean-Marc},
       keywords = {eye-gaze, listener categories, non-verbal cues},
       projects = {Idiap, TRACOME},
          month = nov,
          title = {Deciphering the Silent Participant. On the Use of Audio-Visual Cues for the Classification of Listener Categories in Group Discussions},
      booktitle = {Proceedings of the 2015 ACM on International Conference on Multimodal Interaction},
         series = {ICMI '15},
           year = {2015},
          pages = {107--114},
      publisher = {ACM},
       location = {Seattle, Washington, USA},
   organization = {ACM},
        address = {New York, NY, USA},
            doi = {10.1145/2818346.2820759},
       abstract = {Estimating a silent participant's degree of engagement and his role within a group discussion can be challenging, as there are no speech related cues available at the given time. Having this information available, however, can provide important insights into the dynamics of the group as a whole. In this paper, we study the classification of listeners into several categories (attentive listener, side participant and bystander). We devised a thin-sliced perception test where subjects were asked to assess listener roles and engagement levels in 15-second video-clips taken from a corpus of group interviews. Results show that humans are usually able to assess silent participant roles. Using the annotation to identify from a set of multimodal low-level features, such as past speaking activity, backchannels (both visual and verbal), as well as gaze patterns, we could identify the features which are able to distinguish between different listener categories. Moreover, the results show that many of the audio-visual effects observed on listeners in dyadic interactions, also hold for multi-party interactions. A preliminary classifier achieves an accuracy of 64\%.},
            pdf = {https://publications.idiap.ch/attachments/papers/2015/Oertel_ICMI_2015.pdf},
}