%Aigaion2 BibTeX export from Idiap Publications
%Thursday 21 November 2024 01:10:56 PM
@TECHREPORT{ElShafey_Idiap-RR-07-2013,
         author = {El Shafey, Laurent and McCool, Chris and Wallace, Roy and Marcel, S{\'{e}}bastien},
       projects = {Idiap},
          month = {3},
          title = {A Scalable Formulation of Probabilistic Linear Discriminant Analysis: Applied to Face Recognition},
           type = {Idiap-RR},
         number = {Idiap-RR-07-2013},
           year = {2013},
    institution = {Idiap},
           note = {Accepted for publication},
            url = {https://pypi.python.org/pypi/xbob.paper.tpami2013},
       abstract = {In this paper we present a scalable and exact solution for probabilistic linear discriminant analysis (PLDA). PLDA is a probabilistic model that has been shown to provide state-of-the-art performance for both face and speaker recognition. However, it has one major drawback: at training time, estimating the latent variables requires the inversion and storage of a matrix whose size grows quadratically with the number of samples for the identity (class). To date, two approaches have been taken to deal with this problem: i) use an exact solution, which calculates this large matrix and is obviously not scalable with the number of samples, or ii) derive a variational approximation to the problem. We present a scalable derivation which is theoretically equivalent to the previous non-scalable solution and so obviates the need for a variational approximation. Experimentally, we demonstrate the efficacy of our approach in two ways. First, on Labelled Faces in the Wild, we illustrate the equivalence of our scalable implementation with previously published work. Second, on the large Multi-PIE database, we illustrate the gain in performance when using more training samples per identity (class), which is made possible by the proposed scalable formulation of PLDA.},
            pdf = {https://publications.idiap.ch/attachments/reports/2013/ElShafey_Idiap-RR-07-2013.pdf}
}
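% The scalability argument in the abstract can be made concrete with the standard PLDA generative
% model; the following is a hedged sketch in the commonly used Prince-and-Elder-style notation
% (the symbols \mu, F, G, h_i, w_{ij}, \Sigma are assumptions for illustration, not taken from the report):
%
%   x_{ij} = \mu + F h_{i} + G w_{ij} + \epsilon_{ij}, \qquad \epsilon_{ij} \sim \mathcal{N}(0, \Sigma)
%
% Exact inference couples the identity factor h_i with all n_i within-class factors w_{i1}, ..., w_{i n_i},
% so the joint posterior precision matrix has dimension (d_F + n_i d_G) and its storage grows quadratically
% with the number of samples n_i per identity (class); the report's stated contribution is an exact
% reformulation that avoids building and inverting this matrix.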