%Aigaion2 BibTeX export from Idiap Publications
%Thursday 21 November 2024 01:22:43 PM
@inproceedings{Okada_ICMI_2015,
  author    = {Okada, Shogo and Aran, Oya and Gatica-Perez, Daniel},
  projects  = {Idiap, UBIMPRESSED},
  title     = {Personality Trait Classification via Co-Occurrent Multiparty Multimodal Event Discovery},
  booktitle = {Proceedings of the {ACM} International Conference on Multimodal Interaction},
  series    = {ICMI '15},
  year      = {2015},
  month     = nov,
  pages     = {15--22},
  publisher = {ACM},
  location  = {Seattle, Washington, USA},
  isbn      = {978-1-4503-3912-4},
  doi       = {10.1145/2818346.2820757},
  abstract  = {This paper proposes a novel feature extraction framework from multi-party multimodal conversation for inference of personality traits and emergent leadership. The proposed framework represents multi modal features as the combination of each participant's nonverbal activity and group activity. This feature representation enables to compare the nonverbal patterns extracted from the participants of different groups in a metric space. It captures how the target member outputs nonverbal behavior observed in a group (e.g. the member speaks while all members move their body), and can be available for any kind of multiparty conversation task. Frequent co-occurrent events are discovered using graph clustering from multimodal sequences. The proposed framework is applied for the ELEA corpus which is an audio visual dataset collected from group meetings. We evaluate the framework for binary classification task of 10 personality traits. Experimental results show that the model trained with co-occurrence features obtained higher accuracy than previously related work in 8 out of 10 traits. In addition, the co-occurrence features improve the accuracy from 2\% up to 17\%.},
  pdf       = {https://publications.idiap.ch/attachments/papers/2015/Okada_ICMI_2015.pdf},
}