% Aigaion2 BibTeX export from Idiap Publications
% Thursday 21 November 2024 05:52:48 AM
@article{Siegfried_TOMM_2021,
  author   = {Siegfried, Remy and Odobez, Jean-Marc},
  title    = {Robust Unsupervised Gaze Calibration using Conversation and Manipulation Attention Priors},
  journal  = {ACM Transactions on Multimedia Computing, Communications, and Applications},
  year     = {2022},
  month    = jan,
  volume   = {18},
  number   = {1},
  pages    = {26},
  issn     = {1551-6857},
  doi      = {10.1145/3472622},
  url      = {https://doi.org/10.1145/3472622},
  keywords = {conversation, Gaze estimation, manipulation, online calibration, remote sensor, RGB-D camera, unsupervised calibration, visual focus of attention},
  projects = {Idiap, MUMMER},
  abstract = {Gaze estimation is a difficult task, even for humans. However, as humans, we are good at understanding a situation and exploiting it to guess the expected visual focus of attention (VFOA) of people, and we usually use this information to retrieve people's gaze. In this paper, we propose to leverage such situation-based expectation about people's VFOA to collected weakly labeled gaze samples and perform person-specific calibration of gaze estimators in an unsupervised and online way. In this context, our contributions are the following: i) we show how task contextual attention priors can be used to gather reference gaze samples, which is a cumbersome process otherwise; ii) we propose a robust estimation framework to exploit these weak labels for the estimation of the calibration model parameters; iii) we demonstrate the applicability of this approach on two Human-Human and Human-Robot interaction settings, namely conversation, and manipulation. Experiments on three datasets validate our approach, providing insights on the effectiveness of the prior and on the impact of different calibration models, in particular the usefulness of taking head pose into account.},
  pdf      = {https://publications.idiap.ch/attachments/papers/2022/Siegfried_TOMM_2021.pdf},
}