%Aigaion2 BibTeX export from Idiap Publications
%Thursday 21 November 2024 03:36:07 PM

@TECHREPORT{Yu_Idiap-RR-09-2017,
         author = {Yu, Yu and Funes Mora, Kenneth Alberto and Odobez, Jean-Marc},
       projects = {UBIMPRESSED, MUMMER},
          month = {2},
          title = {Robust and Accurate 3D Head Pose Estimation through 3DMM and Online Head Model Reconstruction},
           type = {Idiap-RR},
         number = {Idiap-RR-09-2017},
           year = {2017},
    institution = {Idiap},
       crossref = {Yu_FG2017_2017},
            pdf = {https://publications.idiap.ch/attachments/reports/2017/Yu_Idiap-RR-09-2017.pdf}
}



cross-referenced publications: 
@INPROCEEDINGS{Yu_FG2017_2017,
         author = {Yu, Yu and Funes Mora, Kenneth Alberto and Odobez, Jean-Marc},
       projects = {UBIMPRESSED, MUMMER},
          title = {Robust and Accurate 3D Head Pose Estimation through 3DMM and Online Head Model Reconstruction},
      booktitle = {Proceedings of the 12th IEEE International Conference on Automatic Face and Gesture Recognition (FG 2017)},
           year = {2017},
       crossref = {Yu_Idiap-RR-09-2017},
        abstract = {Accurate and robust 3D head pose estimation is important
for face-related analysis. Though high accuracy has been achieved by
previous works based on the 3D morphable model (3DMM), their performance
drops with extreme head poses because such models usually represent only
the frontal face region. In this paper, we present a robust head pose
estimation framework that complements a 3DMM with an online 3D
reconstruction of the full head, providing more support when handling
extreme head poses. The approach includes a robust online 3DMM fitting
step based on multi-view observation samples as well as smooth,
face-neutral synthetic samples generated from the reconstructed 3D head
model. Experiments show that our framework achieves state-of-the-art pose
estimation accuracy on the BIWI dataset and performs robustly for extreme
head poses when tested on natural interaction sequences.},
            pdf = {https://publications.idiap.ch/attachments/papers/2017/Yu_FG2017_2017.pdf}
}