%Aigaion2 BibTeX export from Idiap Publications
%Sunday 23 June 2024 06:20:32 AM

         author = {Moerland, Perry},
       projects = {Idiap},
          title = {Classification using localized mixtures of experts},
      booktitle = {Proceedings of the International Conference on Artificial Neural Networks (ICANN'99)},
         volume = {2},
           year = {1999},
      publisher = {London: IEE},
           note = {(IDIAP-RR 98-14)},
       crossref = {moerland-98.2},
       abstract = {A mixture of experts consists of a gating network that learns to partition the input space and of experts networks attributed to these different regions. This paper focuses on the choice of the gating network. First, a localized gating network based on a mixture of linear latent variable models is proposed that extends a gating network introduced by Xu et al, based on Gaussian mixture models. It is shown that this localized mixture of experts model, can be trained with the Expectation Maximization algorithm. The localized model is compared on a set of classification problems, with mixtures of experts having single or multi-layer perceptrons as gating network. It is found that the standard mixture of experts with feed-forward networks as gate often outperforms the other models.},
            pdf = {https://publications.idiap.ch/attachments/papers/1999/moerland-localized98.pdf},
     postscript = {ftp://ftp.idiap.ch/pub/papers/neural/moerland.localized.ps.gz},

@comment{Cross-referenced parent entries below. Classic BibTeX requires a crossref
         parent to appear AFTER all entries that reference it; keep this ordering.}
         author = {Moerland, Perry},
       projects = {Idiap},
          title = {Localized mixtures of experts},
           type = {Idiap-RR},
         number = {Idiap-RR-14-1998},
           year = {1998},
    institution = {IDIAP},