%Aigaion2 BibTeX export from Idiap Publications
%Friday 05 December 2025 11:15:20 PM

@ARTICLE{Chen_TASLP_2024,
                      author = {Chen, Haolin and Garner, Philip N.},
                    keywords = {Bayesian transfer learning, catastrophic forgetting, Laplace approximation, parameter-efficient fine-tuning},
                    projects = {Idiap, NAST},
                       month = sep,
                       title = {Bayesian Parameter-Efficient Fine-Tuning for Overcoming Catastrophic Forgetting},
                     journal = {IEEE/ACM Transactions on Audio, Speech, and Language Processing},
                        year = {2024},
                         doi = {10.1109/TASLP.2024.3463395},
                     abstract = {We are motivated primarily by the adaptation of text-to-speech synthesis models; however, we argue that more generic parameter-efficient fine-tuning (PEFT) is an appropriate framework for such adaptation. Nevertheless, catastrophic forgetting remains an issue with PEFT, damaging the pre-trained model's inherent capabilities. We demonstrate that existing Bayesian learning techniques can be applied to PEFT to prevent catastrophic forgetting, as long as the parameter shift of the fine-tuned layers can be calculated differentiably. In a principled series of experiments on language modeling and speech synthesis tasks, we utilize established Laplace approximations, including diagonal and Kronecker-factored approaches, to regularize PEFT with low-rank adaptation (LoRA) and compare their performance in preserving pre-training knowledge. Our results demonstrate that our methods overcome catastrophic forgetting without degrading fine-tuning performance, and that the Kronecker-factored approximation preserves pre-training knowledge better than the diagonal one.},
                         pdf = {https://publications.idiap.ch/attachments/papers/2024/Chen_TASLP_2024.pdf}
}