%Aigaion2 BibTeX export from Idiap Publications
%Thursday 21 November 2024 12:29:30 PM

@ARTICLE{Henderson_NVIB_2022,
         author = {Henderson, James and Fehr, Fabio},
       projects = {Idiap, EVOLANG},
          title = {A Variational AutoEncoder for Transformers with Nonparametric Variational Information Bottleneck},
        journal = {arXiv preprint arXiv:2207.13529},
           year = {2022},
            url = {https://arxiv.org/abs/2207.13529},
            doi = {10.48550/ARXIV.2207.13529},
       abstract = {We propose a VAE for Transformers by developing a variational information bottleneck regulariser for Transformer embeddings. We formalise the embedding space of Transformer encoders as mixture probability distributions, and use Bayesian nonparametrics to derive a nonparametric variational information bottleneck (NVIB) for such attention-based embeddings. The variable number of mixture components supported by nonparametric methods captures the variable number of vectors supported by attention, and the exchangeability of our nonparametric distributions captures the permutation invariance of attention. This allows NVIB to regularise the number of vectors accessible with attention, as well as the amount of information in individual vectors. By regularising the cross-attention of a Transformer encoder-decoder with NVIB, we propose a nonparametric variational autoencoder (NVAE). Initial experiments on training an NVAE on natural language text show that the induced embedding space has the desired properties of a VAE for Transformers.}
}