%Aigaion2 BibTeX export from Idiap Publications
%Tuesday 07 May 2024 05:02:57 AM

@INPROCEEDINGS{gatica02d-conf,
         author = {Gatica-Perez, Daniel and Lathoud, Guillaume and McCowan, Iain A. and Odobez, Jean-Marc and Moore, Darren},
       projects = {Idiap},
          title = {Audio-Visual Speaker Tracking with Importance Particle Filters},
      booktitle = {IEEE International Conference on Image Processing (ICIP)},
           year = {2003},
       crossref = {gatica02d},
        abstract = {We present a probabilistic methodology for audio-visual (AV) speaker tracking, using an uncalibrated wide-angle camera and a microphone array. The algorithm fuses 2-D object shape and audio information via importance particle filters (I-PFs), allowing for the asymmetrical integration of AV information in a way that efficiently exploits the complementary features of each modality. Audio localization information is used to generate an importance sampling (IS) function, which guides the random search process of a particle filter towards regions of the configuration space likely to contain the true configuration (a speaker). The measurement process integrates contour-based and audio observations, which results in reliable head tracking in realistic scenarios. We show that imperfect single modalities can be combined into an algorithm that automatically initializes and tracks a speaker, switches between multiple speakers, tolerates visual clutter, and recovers from total AV object occlusion, in the context of a multimodal meeting room.},
            pdf = {https://publications.idiap.ch/attachments/reports/2002/rr02-37.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/2002/rr02-37.ps.gz},
  ipdmembership = {speech, vision},
}



cross-referenced publications:
@TECHREPORT{gatica02d,
         author = {Gatica-Perez, Daniel and Lathoud, Guillaume and McCowan, Iain A. and Odobez, Jean-Marc and Moore, Darren},
       projects = {Idiap},
          title = {Audio-Visual Speaker Tracking with Importance Particle Filters},
           type = {Idiap-RR},
         number = {Idiap-RR-37-2002},
           year = {2002},
    institution = {IDIAP},
        abstract = {We present a probabilistic methodology for audio-visual (AV) speaker tracking, using an uncalibrated wide-angle camera and a microphone array. The algorithm fuses 2-D object shape and audio information via importance particle filters (I-PFs), allowing for the asymmetrical integration of AV information in a way that efficiently exploits the complementary features of each modality. Audio localization information is used to generate an importance sampling (IS) function, which guides the random search process of a particle filter towards regions of the configuration space likely to contain the true configuration (a speaker). The measurement process integrates contour-based and audio observations, which results in reliable head tracking in realistic scenarios. We show that imperfect single modalities can be combined into an algorithm that automatically initializes and tracks a speaker, switches between multiple speakers, tolerates visual clutter, and recovers from total AV object occlusion, in the context of a multimodal meeting room.},
            pdf = {https://publications.idiap.ch/attachments/reports/2002/rr02-37.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/2002/rr02-37.ps.gz},
  ipdmembership = {speech, vision},
}