%Aigaion2 BibTeX export from Idiap Publications
%Saturday 21 December 2024 05:25:50 PM

@ARTICLE{odobez:tip:05,
         author = {Odobez, Jean-Marc and Gatica-Perez, Daniel and Ba, Sil{\`e}ye O.},
       projects = {Idiap},
          month = nov,
          title = {Embedding Motion in Model-Based Stochastic Tracking},
        journal = {IEEE Transactions on Image Processing},
         volume = {15},
         number = {11},
           year = {2006},
           note = {IDIAP-RR 04-61},
       crossref = {odobez-rr-04-61},
       abstract = {Particle filtering (PF) is now established as one of the most popular methods for visual tracking. Within this framework, two assumptions are generally made. The first is that the data are temporally independent given the sequence of object states, and the second one is the use of the transition prior as proposal distribution. In this paper, we argue that the first assumption does not strictly hold and that the second can be improved. We propose to handle both modeling issues using motion. Explicit motion measurements are used to drive the sampling process towards the new interesting regions of the image, while implicit motion measurements are introduced in the likelihood evaluation to model the data correlation term. The proposed model allows to handle abrupt motion changes and to filter out visual distractors when tracking objects with generic models based on shape representations. Experimental results compared against the CONDENSATION algorithm have demonstrated superior tracking performance.},
ipdinar={2005},
ipdmembership={vision},
ipdpriority={2},
}



crossreferenced publications: 
@TECHREPORT{odobez-rr-04-61,
         author = {Odobez, Jean-Marc and Gatica-Perez, Daniel},
       projects = {Idiap},
          title = {Motion likelihood and proposal modeling in Model-Based Stochastic Tracking},
           type = {Idiap-RR},
         number = {Idiap-RR-61-2004},
           year = {2004},
    institution = {IDIAP},
        address = {Martigny, Switzerland},
           note = {This report was accepted for publication in IEEE Transactions on Image Processing, 2006},
       abstract = {Particle filtering is now established as one of the most popular methods for visual tracking. Within this framework, there are two important considerations. The first one refers to the generic assumption that the observations are temporally independent given the sequence of object states. The second consideration, often made in the literature, uses the transition prior as proposal distribution. Thus, the current observations are not taken into account, requesting the noise process of this prior to be large enough to handle abrupt trajectory changes. As a result, many particles are either wasted in low likelihood regions of the state space, resulting in low sampling efficiency, or more importantly, propagated to distractor regions of the image, resulting in tracking failures. In this paper, we propose to handle both considerations using motion. We first argue that in general observations are conditionally correlated, and propose a new model to account for this correlation allowing for the natural introduction of implicit and/or explicit motion measurements in the likelihood term. Secondly, explicit motion measurements are used to drive the sampling process towards the most likely regions of the state space. Overall, the proposed model allows to handle abrupt motion changes and to filter out visual distractors when tracking objects with generic models based on shape or color distribution. Experimental results obtained on head tracking, using several sequences with moving camera involving large dynamics, and compared against the CONDENSATION algorithm, have demonstrated superior tracking performance of our approach.},
            pdf = {https://publications.idiap.ch/attachments/reports/2004/rr04-61.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/2004/rr04-61.ps.gz},
ipdinar={2004},
ipdmembership={vision},
language={English},
}