%Aigaion2 BibTeX export from Idiap Publications
%Sunday 05 May 2024 02:34:57 PM

@INPROCEEDINGS{Kuzborskij_ICML_2013,
         author = {Kuzborskij, Ilja and Orabona, Francesco},
       keywords = {domain adaptation, learning theory, leave-one-out, regularized least squares, stability, transfer learning},
       projects = {Idiap, NINAPRO},
          month = jun,
          title = {Stability and Hypothesis Transfer Learning},
      booktitle = {International Conference on Machine Learning},
           year = {2013},
        abstract = {We consider the transfer learning scenario in which the learner does not have direct access to the source domain, but instead operates on the basis of hypotheses induced from it: the Hypothesis Transfer Learning (HTL) problem. In particular, we conduct a theoretical analysis of HTL by studying the algorithmic stability of a class of HTL algorithms based on Regularized Least Squares with biased regularization. We show that the relatedness of the source and target domains accelerates the convergence of the Leave-One-Out error to the generalization error, thus enabling the use of the Leave-One-Out error to find the optimal transfer parameters even with a small training set. In the case of unrelated domains, we also suggest a theoretically principled way to prevent negative transfer, so that in the limit we recover the performance of the algorithm that uses no knowledge from the source domain.},
            pdf = {https://publications.idiap.ch/attachments/papers/2013/Kuzborskij_ICML_2013.pdf}
}
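% A minimal sketch (not part of the entry above) of the biased-regularization
% Regularized Least Squares objective that the abstract refers to; w_src denotes
% the source hypothesis and beta a transfer parameter, both assumed notation here:
%
%   \hat{w} = \arg\min_{w} \frac{1}{m} \sum_{i=1}^{m} \left( \langle w, x_i \rangle - y_i \right)^2
%             + \lambda \, \lVert w - \beta \, w_{\mathrm{src}} \rVert^2
%
% Setting beta = 0 recovers plain RLS that uses no knowledge from the source
% domain, consistent with the abstract's remark on preventing negative transfer.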