%Aigaion2 BibTeX export from Idiap Publications
%Sunday 28 April 2024 12:16:18 PM

@INPROCEEDINGS{odobez-icpr-2004,
         author = {Odobez, Jean-Marc and Gatica-Perez, Daniel},
       projects = {Idiap},
          month = {8},
          title = {Embedding motion in model-based stochastic tracking},
      booktitle = {17th Int. Conf. Pattern Recognition (ICPR)},
         volume = {2},
           year = {2004},
        address = {Cambridge, UK},
           note = {Similar to RR-03-72.},
       crossref = {odobez-rr-03-72},
       abstract = {Particle filtering (PF) is now established as one of the most popular methods for visual tracking. Within this framework, two assumptions are generally made. The first is that the data are temporally independent given the sequence of object states, and the second is the use of the transition prior as the proposal distribution. In this paper, we argue that the first assumption does not strictly hold and that the second can be improved. We propose to handle both modeling issues using motion. Explicit motion measurements are used to drive the sampling process towards the new regions of interest in the image, while implicit motion measurements are introduced in the likelihood evaluation to model the data correlation term. The proposed model makes it possible to handle abrupt motion changes and to filter out visual distractors when tracking objects with generic models based on shape representations. Experimental results compared against the CONDENSATION algorithm demonstrate superior tracking performance.},
            pdf = {https://publications.idiap.ch/attachments/reports/2004/odobez_2004_icpr.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/2004/odobez_2004_icpr.ps.gz},
        ipdinar = {2004},
  ipdmembership = {vision},
    ipdpriority = {2},
}



% cross-referenced publications:
@TECHREPORT{odobez-rr-03-72,
         author = {Odobez, Jean-Marc and Gatica-Perez, Daniel and Ba, Sil{\`{e}}ye O.},
       projects = {Idiap},
          title = {Embedding Motion in Model-Based Stochastic Tracking},
           type = {Idiap-RR},
         number = {Idiap-RR-72-2003},
           year = {2003},
    institution = {IDIAP},
        address = {Martigny, Switzerland},
           note = {Published in International Conference on Pattern Recognition (ICPR), 2004},
       abstract = {Particle filtering is now established as one of the most popular methods for visual tracking. Within this framework, two assumptions are generally made. The first is that the data are temporally independent given the sequence of object states. In this paper, we argue that in general the data are correlated, and that modeling such dependency should improve tracking robustness. The second assumption is the use of the transition prior as the proposal distribution. Thus, the current observation is not taken into account, requiring the noise process of this prior to be large enough to handle abrupt trajectory changes. As a result, many particles are either wasted in low-likelihood areas, reducing the efficiency of the sampling, or, more importantly, propagated onto nearby distractor regions of the image, resulting in tracking failures. In this paper, we propose to handle both issues using motion. Explicit motion measurements are used to drive the sampling process towards the new regions of interest in the image, while implicit motion measurements are introduced in the likelihood evaluation to model the data correlation term. The proposed model makes it possible to handle abrupt motion changes and to filter out visual distractors when tracking objects with generic models based on shape or color distribution representations. Experimental results compared against the CONDENSATION algorithm demonstrate superior tracking performance.},
            pdf = {https://publications.idiap.ch/attachments/reports/2003/rr03-72.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/2003/rr03-72.ps.gz},
        ipdinar = {2003},
  ipdmembership = {vision},
       language = {English},
}
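
% The two entries above describe a particle filter in which explicit motion
% measurements drive the proposal distribution (instead of the plain transition
% prior) and implicit motion measurements enter the likelihood. The sketch below
% is a minimal, hypothetical Python illustration of the first idea only, on a
% 1-D toy problem with Gaussian noise; all function names and parameters are
% assumptions made for this illustration, not the authors' implementation, and
% the data-correlation likelihood term is omitted.

import numpy as np

rng = np.random.default_rng(0)

def motion_driven_proposal(particles, measured_motion, noise_std=1.0):
    # Shift each particle by the measured frame-to-frame motion, then add
    # diffusion noise. Using the measurement here, rather than a zero-mean
    # transition prior, concentrates samples near the new object location.
    return particles + measured_motion + rng.normal(0.0, noise_std, size=particles.shape)

def likelihood(particles, observation, obs_std=2.0):
    # Toy appearance likelihood: Gaussian around the observed position.
    return np.exp(-0.5 * ((particles - observation) / obs_std) ** 2)

def resample(particles, weights):
    # Multinomial resampling proportional to the importance weights. Because the
    # measured motion is folded into the dynamics in this simplified sketch, the
    # weight reduces to the likelihood; a full treatment would also weight by
    # the ratio of transition prior to proposal.
    idx = rng.choice(len(particles), size=len(particles), p=weights / weights.sum())
    return particles[idx]

# Track a 1-D target that makes an abrupt motion change at frame 10.
n_particles = 500
particles = rng.normal(0.0, 1.0, n_particles)
true_pos = 0.0
for t in range(20):
    true_motion = 1.0 if t < 10 else -4.0                 # abrupt trajectory change
    true_pos += true_motion
    observation = true_pos + rng.normal(0.0, 0.5)
    measured_motion = true_motion + rng.normal(0.0, 0.3)  # explicit motion estimate
    particles = motion_driven_proposal(particles, measured_motion)
    weights = likelihood(particles, observation)
    particles = resample(particles, weights)
    print(f"frame {t:2d}: estimate = {particles.mean():6.2f}, truth = {true_pos:6.2f}")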