% Aigaion2 BibTeX export from Idiap Publications
% Sunday 22 December 2024 03:31:29 PM
@INPROCEEDINGS{Siegfried_ICMI17-2_2017,
  author    = {Siegfried, Remy and Yu, Yu and Odobez, Jean-Marc},
  keywords  = {Appearance based model, bias correction, Gaze estimation, person invariance, RGB-D cameras},
  projects  = {UBIMPRESSED, MUMMER},
  month     = nov,
  title     = {Towards the Use of Social Interaction Conventions as Prior for Gaze Model Adaptation},
  booktitle = {Proceedings of the 19th ACM International Conference on Multimodal Interaction},
  year      = {2017},
  pages     = {9},
  publisher = {ACM},
  isbn      = {978-1-4503-5543-8/17/11},
  doi       = {10.1145/3136755.3136793},
  abstract  = {Gaze is an important non-verbal cue involved in many facets of social interaction, such as communication, attentiveness, and attitudes. Nevertheless, extracting gaze directions visually and remotely usually suffers from large errors caused by low-resolution images, inaccurate eye cropping, or large eye-shape variations across the population, among others. This paper hypothesizes that these challenges can be addressed by exploiting multimodal social cues for gaze model adaptation on top of a head-pose-independent 3D gaze estimation framework. First, a robust eye cropping refinement is achieved by combining a semantic face model with eye landmark detections; we also investigate whether temporal smoothing can overcome the limitations of instantaneous refinement. Second, to study whether social interaction conventions can be used as priors for adaptation, we exploit speaking status and head pose constraints to derive soft gaze labels and infer person-specific gaze bias using robust statistics. Experimental results on gaze coding in natural interactions from two different settings demonstrate that the two steps of our gaze adaptation method reduce gaze errors by a large margin over the baseline and generalize to several identities in challenging scenarios.},
  pdf       = {https://publications.idiap.ch/attachments/papers/2017/Siegfried_ICMI17-2_2017.pdf}
}