%Aigaion2 BibTeX export from Idiap Publications
%Monday 29 April 2024 01:10:39 PM

@INPROCEEDINGS{Siegfried_ECEM_2017,
         author = {Siegfried, R{\'e}my and Odobez, Jean-Marc},
       projects = {Idiap, UBIMPRESSED},
          month = sep,
          title = {Supervised Gaze Bias Correction for Gaze Coding in Interactions},
      booktitle = {{ECEM} {COGAIN} Symposium},
           year = {2017},
          pages = {3},
       crossref = {Siegfried_Idiap-RR-23-2017},
       abstract = {Understanding the role of gaze in conversations and social interactions or exploiting it for HRI applications is an ongoing research subject. In these contexts, vision-based eye trackers are preferred as they are non-invasive and allow people to behave more naturally. In particular, appearance-based methods (ABM) are very promising, as they can perform online gaze estimation and have the potential to be head pose and person invariant, accommodate more situations as well as user mobility and the resulting low-resolution images. However, they may also suffer from a lack of robustness when several of these challenges are jointly present. In this work, we address gaze coding in human-human interactions and present a simple method based on a few manually annotated frames that is able to much reduce the error of a head pose invariant ABM method, as shown on a dataset of 6 interactions.},
            pdf = {https://publications.idiap.ch/attachments/papers/2018/Siegfried_ECEM_2017.pdf}
}



crossreferenced publications: 
@TECHREPORT{Siegfried_Idiap-RR-23-2017,
         author = {Siegfried, R{\'e}my and Odobez, Jean-Marc},
       keywords = {appearance model, attention, bias correction, eye tracking, Gaze, usability},
       projects = {Idiap, UBIMPRESSED},
          month = sep,
          title = {Supervised Gaze Bias Correction for Gaze Coding in Interactions},
           type = {Idiap-RR},
         number = {Idiap-RR-23-2017},
           year = {2017},
    institution = {Idiap},
       abstract = {Understanding the role of gaze in conversations and social interactions or exploiting it for
HRI applications is an ongoing research subject. In these contexts, vision based eye trackers
are preferred as they are non-invasive and allow people to behave more naturally. In particular,
appearance based methods (ABM) are very promising, as they can perform online gaze
estimation and have the potential to be head pose and person invariant, accommodate more
situations as well as user mobility and the resulting low resolution images. However, they may
also suffer from a lack of robustness when several of these challenges are jointly present. In
this work, we address gaze coding in human-human interactions, and present a simple method
based on a few manually annotated frames that is able to much reduce the error of a head pose
invariant ABM method, as shown on a dataset of 6 interactions.},
            pdf = {https://publications.idiap.ch/attachments/reports/2017/Siegfried_Idiap-RR-23-2017.pdf}
}