@inproceedings{saunders-deneefe-2024-domain,
  title     = {Domain adapted machine translation: What does catastrophic forgetting forget and why?},
  author    = {Saunders, Danielle and
               DeNeefe, Steve},
  editor    = {Al-Onaizan, Yaser and
               Bansal, Mohit and
               Chen, Yun-Nung},
  booktitle = {Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing},
  month     = nov,
  year      = {2024},
  address   = {Miami, Florida, USA},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2024.emnlp-main.704/},
  doi       = {10.18653/v1/2024.emnlp-main.704},
  pages     = {12660--12671},
  abstract  = {Neural Machine Translation (NMT) models can be specialized by domain adaptation, often involving fine-tuning on a dataset of interest. This process risks catastrophic forgetting: rapid loss of generic translation quality. Forgetting has been widely observed, with many mitigation methods proposed. However, the causes of forgetting and the relationship between forgetting and adaptation data are underexplored. This paper takes a novel approach to understanding catastrophic forgetting during NMT adaptation by investigating the impact of the data. We provide a first investigation of what is forgotten, and why. We examine the relationship between forgetting and the in-domain data, and show that the amount and type of forgetting is linked to that data{'}s target vocabulary coverage. Our findings pave the way toward better informed NMT domain adaptation.},
}
Markdown (Informal)
[Domain adapted machine translation: What does catastrophic forgetting forget and why?](https://aclanthology.org/2024.emnlp-main.704/) (Saunders & DeNeefe, EMNLP 2024)
ACL