%Aigaion2 BibTeX export from Idiap Publications
%Monday 29 April 2024 12:53:17 AM

@TECHREPORT{Moerland-99.1a,
         author = {Moerland, Perry and Mayoraz, Eddy},
       projects = {Idiap},
          title = {DynaBoost: Combining Boosted Hypotheses in a Dynamic Way},
           type = {Idiap-RR},
         number = {Idiap-RR-09-1999},
           year = {1999},
    institution = {IDIAP},
       abstract = {We present an extension of Freund and Schapire's AdaBoost algorithm that allows an input-dependent combination of the base hypotheses. A separate weak learner is used for determining the input-dependent weights of each hypothesis. The error function minimized by these additional weak learners is a margin cost function that has also been shown to be minimized by AdaBoost. The weak learners used for dynamically combining the base hypotheses are simple perceptrons. We compare our dynamic combination model with AdaBoost on a range of binary and multi-class classification problems. It is shown that the dynamic approach significantly improves the results on most data sets when (rather weak) perceptron base hypotheses are used, while the difference in performance is small when the base hypotheses are MLPs.},
            pdf = {https://publications.idiap.ch/attachments/reports/1999/rr99-09.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/reports/1999/rr99-09.ps.gz},
   ipdmembership = {learning},
}
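
% The abstract above describes an input-dependent ("dynamic") combination of boosted base
% hypotheses. What follows is an illustrative sketch, not the authors' code and not part of
% the report: it assumes binary labels in {-1,+1}, perceptron base hypotheses, one linear
% gating unit per hypothesis trained by gradient descent on the exponential margin cost
% exp(-y F(x)) (the cost also minimized by AdaBoost), and made-up learning rates and round
% counts. Function names (train_perceptron, dynaboost_fit, dynaboost_predict) are hypothetical.
import numpy as np

def train_perceptron(X, y, sample_weight, epochs=20, lr=0.1):
    """Weighted perceptron used as a weak base hypothesis (illustrative choice)."""
    w, b = np.zeros(X.shape[1]), 0.0
    for _ in range(epochs):
        for i in np.random.permutation(len(y)):
            if y[i] * (X[i] @ w + b) <= 0:            # misclassified example
                w += lr * sample_weight[i] * y[i] * X[i]
                b += lr * sample_weight[i] * y[i]
    return lambda Z: np.sign(Z @ w + b + 1e-12)

def dynaboost_fit(X, y, n_rounds=10, gate_epochs=50, gate_lr=0.01):
    n = len(y)
    d = np.full(n, 1.0 / n)                           # AdaBoost-style example weights
    hyps, gates = [], []                              # base hypotheses and gating units
    F = np.zeros(n)                                   # current combined score F(x_i)
    for _ in range(n_rounds):
        h = train_perceptron(X, y, d)
        pred = h(X)
        # Gating unit c_t(x) = v.x + c, fitted to reduce the margin cost
        # sum_i exp(-y_i * (F(x_i) + c_t(x_i) * h(x_i))).
        v, c = np.zeros(X.shape[1]), 0.0
        for _ in range(gate_epochs):
            ct = X @ v + c
            margin = y * (F + ct * pred)
            g = -y * pred * np.exp(-np.clip(margin, -30.0, 30.0))   # d(cost)/d(c_t(x_i))
            v -= gate_lr * (X.T @ g) / n
            c -= gate_lr * g.mean()
        F += (X @ v + c) * pred
        hyps.append(h)
        gates.append((v, c))
        # Re-weight examples by the margin cost, as in AdaBoost.
        d = np.exp(-np.clip(y * F, -30.0, 30.0))
        d /= d.sum()
    return hyps, gates

def dynaboost_predict(X, hyps, gates):
    F = np.zeros(len(X))
    for h, (v, c) in zip(hyps, gates):
        F += (X @ v + c) * h(X)                       # input-dependent weight times vote
    return np.sign(F)

% Minimal usage example on synthetic data (again purely illustrative):
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.normal(size=(200, 2))
    y = np.sign(X[:, 0] * X[:, 1] + 1e-9)             # simple non-linear target
    hyps, gates = dynaboost_fit(X, y)
    print("train accuracy:", (dynaboost_predict(X, hyps, gates) == y).mean())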