%Aigaion2 BibTeX export from Idiap Publications
%Tuesday 30 April 2024 03:15:58 AM

@INPROCEEDINGS{Luo_CVPR-OLCV_2010,
         author = {Luo, Jie and Orabona, Francesco and Fornoni, Marco and Caputo, Barbara and Cesa-Bianchi, Nicolo},
       projects = {Idiap, DIRAC},
          month = jun,
          title = {OM-2: An Online Multi-class Multi-kernel Learning Algorithm},
      booktitle = {Proceedings of CVPR 2010, Online Learning for Computer Vision Workshop},
           year = {2010},
       crossref = {Luo_Idiap-RR-06-2010},
       abstract = {Efficient learning from massive amounts of information is a hot topic in computer vision. Available training sets contain many examples with several visual descriptors, a setting in which current batch approaches are typically slow and do not scale well. In this work we introduce a theoretically motivated and efficient online learning algorithm for the Multi Kernel Learning (MKL) problem. For this algorithm we prove a theoretical bound on the number of multiclass mistakes made on any arbitrary data sequence. Moreover, we empirically show that its performance is on par, or better, than standard batch MKL (e.g. SILP, SimpleMKL) algorithms.},
            pdf = {https://publications.idiap.ch/attachments/papers/2011/Luo_CVPR-OLCV_2010.pdf}
}



Cross-referenced publications:
@TECHREPORT{Luo_Idiap-RR-06-2010,
         author = {Luo, Jie and Orabona, Francesco and Fornoni, Marco and Caputo, Barbara and Cesa-Bianchi, Nicolo},
       projects = {Idiap},
          month = apr,
          title = {OM-2: An Online Multi-class Multi-kernel Learning Algorithm},
           type = {Idiap-RR},
         number = {Idiap-RR-06-2010},
           year = {2010},
    institution = {Idiap},
       abstract = {Efficient learning from massive amounts of information is a hot topic in computer vision. Available training sets contain many examples with several visual descriptors, a setting in which current batch approaches are typically slow and do not scale well. In this work we introduce a theoretically motivated and efficient online learning algorithm for the Multi Kernel Learning (MKL) problem. For this algorithm we prove a theoretical bound on the number of multiclass mistakes made on any arbitrary data sequence. Moreover, we empirically show that its performance is on par, or better, than standard batch MKL (e.g. SILP, SimpleMKL) algorithms.},
            pdf = {https://publications.idiap.ch/attachments/reports/2010/Luo_Idiap-RR-06-2010.pdf}
}