%Aigaion2 BibTeX export from Idiap Publications
%Monday 15 April 2024 11:32:43 AM

         author = {Liu, Gang and Yu, Yu and Odobez, Jean-Marc},
       projects = {Idiap, MUMMER, REGENN},
          title = {A Differential Approach for Gaze Estimation},
        journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
         volume = {43},
         number = {3},
           year = {2021},
          pages = {1092--1098},
            url = {https://ieeexplore.ieee.org/document/8920005},
            doi = {10.1109/TPAMI.2019.2957373},
       abstract = {Most non-invasive gaze estimation methods regress gaze directions directly from a single face or eye image. However, due to important variabilities in eye shapes and inner eye structures amongst individuals, universal models obtain limited accuracies and their output usually exhibit high variance as well as subject dependent biases. Thus, increasing accuracy is usually done through calibration, allowing gaze predictions for a subject to be mapped to her actual gaze. In this paper, we introduce a novel approach, which works by directly training a differential convolutional neural network to predict gaze differences between two eye input images of the same subject. Then, given a set of subject specific calibration images, we can use the inferred differences to predict the gaze direction of a novel eye sample. The assumption is that by comparing eye images of the same user, annoyance factors (alignment, eyelid closing, illumination perturbations) which usually plague single image prediction methods can be much reduced, allowing better prediction altogether. Furthermore, the differential network itself can be adapted via finetuning to make predictions consistent with the available user reference pairs. Experiments on 3 public datasets validate our approach which constantly outperforms state-of-the-art methods even when using only one calibration sample or those relying on subject specific gaze adaptation.},
            pdf = {https://publications.idiap.ch/attachments/papers/2019/Liu_PAMI_2020.pdf}