%Aigaion2 BibTeX export from Idiap Publications
%Sunday 13 July 2025 01:23:11 PM

@article{Kuzborskij_MLJ_2016,
  author   = {Kuzborskij, Ilja and Orabona, Francesco},
  title    = {Fast Rates by Transferring from Auxiliary Hypotheses},
  journal  = {Machine Learning},
  year     = {2016},
  projects = {Idiap},
  abstract = {In this work we consider the learning setting where, in addition to the training set, the learner receives a collection of auxiliary hypotheses originating from other tasks. We focus on a broad class of ERM-based linear algorithms that can be instantiated with any non-negative smooth loss function and any strongly convex regularizer. We establish generalization and excess risk bounds, showing that, if the algorithm is fed with a good combination of source hypotheses, generalization happens at the fast rate O(1/m) instead of the usual O(1/sqrt(m)). On the other hand, if the source hypotheses combination is a misfit for the target task, we recover the usual learning rate. As a byproduct of our study, we also prove a new bound on the Rademacher complexity of the smooth loss class under weaker assumptions compared to previous works.},
}