%Aigaion2 BibTeX export from Idiap Publications
%Tuesday 10 December 2024 07:35:47 PM

@INPROCEEDINGS{Henderson_ICLR_2023,
         author = {Henderson, James and Fehr, Fabio},
       keywords = {natural language, transformers, VAE, VIB},
       projects = {Idiap, EVOLANG},
          title = {A {VAE} for {Transformers} with Nonparametric Variational Information Bottleneck},
      booktitle = {The Eleventh International Conference on Learning Representations},
           year = {2023},
            url = {https://openreview.net/forum?id=6QkjC_cs03X},
       abstract = {We propose a Variational AutoEncoder (VAE) for Transformers by developing a Variational Information Bottleneck (VIB) regulariser for Transformer embeddings.  We formalise such attention-based representations as mixture distributions, and use Bayesian nonparametrics to develop a Nonparametric VIB (NVIB) for them.  The variable number of mixture components supported by nonparametrics captures the variable number of vectors supported by attention, and exchangeable distributions from nonparametrics capture the permutation invariance of attention.  Our Transformer VAE (NVAE) uses NVIB to regularise the information passing from the Transformer encoder to the Transformer decoder.  Evaluations of a NVAE, trained on natural language text, demonstrate that NVIB can regularise the number of mixture components in the induced embedding whilst maintaining generation quality and reconstruction capacity.},
            pdf = {https://publications.idiap.ch/attachments/papers/2023/Henderson_ICLR_2023.pdf},
}