%Aigaion2 BibTeX export from Idiap Publications
%Thursday 18 April 2024 12:18:39 PM

         author = {Siegfried, Remy and Yu, Yu and Odobez, Jean-Marc},
       keywords = {blink, convolutional neural network, eye movements, remote sensing, saccade, video processing},
       projects = {Idiap, MUMMER},
          month = jun,
          title = {A Deep Learning Approach for Robust Head Pose Independent Eye Movements Recognition from Videos},
      booktitle = {2019 ACM Symposium on Eye Tracking Research and Applications},
           year = {2019},
          pages = {5},
      publisher = {ACM},
           isbn = {978-1-4503-6709-7},
            doi = {10.1145/3314111.3319844},
       abstract = {Recognizing eye movements is important for gaze behavior understanding like in human communication analysis (human-human or robot interactions)  or for diagnosis (medical, reading impairments). In this paper, we address this task using remote RGB-D sensors to analyze people behaving in natural conditions. This is very challenging given that such sensors have a normal sampling rate of 30 Hz and provide low-resolution eye images (typically 36x60 pixels), and natural scenarios introduce many variabilities in illumination, shadows, head pose, and dynamics. Hence gaze signals one can extract in these conditions have lower precision compared to dedicated IR eye trackers, rendering previous methods less appropriate for the task. To tackle these challenges, we propose a deep learning method that directly processes the eye image video streams to classify them into fixation, saccade, and blink classes, and allows to distinguish irrelevant noise (illumination, low-resolution artifact, inaccurate eye alignment, difficult eye shapes) from true eye motion signals. Experiments on natural 4-party interactions demonstrate the benefit of our approach compared to previous methods, including deep learning models applied to gaze outputs.},
            pdf = {https://publications.idiap.ch/attachments/papers/2019/Siegfried_ETRA_2019.pdf}