% Lai, Chronopoulou & Fraser, Findings of EMNLP 2022.
% Math in the title is brace-protected so sentence-casing .bst styles
% leave the superscript intact; the URL is the canonical Anthology one
% (the exported preview.aclanthology.org link is a temporary ingestion host).
@inproceedings{lai-etal-2022-m4,
    title     = {m{$^4$} Adapter: Multilingual Multi-Domain Adaptation for Machine Translation with a Meta-Adapter},
    author    = {Lai, Wen and
                 Chronopoulou, Alexandra and
                 Fraser, Alexander},
    editor    = {Goldberg, Yoav and
                 Kozareva, Zornitsa and
                 Zhang, Yue},
    booktitle = {Findings of the Association for Computational Linguistics: {EMNLP} 2022},
    month     = dec,
    year      = {2022},
    address   = {Abu Dhabi, United Arab Emirates},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2022.findings-emnlp.315/},
    doi       = {10.18653/v1/2022.findings-emnlp.315},
    pages     = {4282--4296},
    abstract  = {Multilingual neural machine translation models (MNMT) yield state-of-the-art performance when evaluated on data from a domain and language pair seen at training time. However, when a MNMT model is used to translate under domain shift or to a new language pair, performance drops dramatically. We consider a very challenging scenario: adapting the MNMT model both to a new domain and to a new language pair at the same time. In this paper, we propose m{\textasciicircum}4Adapter (Multilingual Multi-Domain Adaptation for Machine Translation with a Meta-Adapter), which combines domain and language knowledge using meta-learning with adapters. We present results showing that our approach is a parameter-efficient solution which effectively adapts a model to both a new language pair and a new domain, while outperforming other adapter methods. An ablation study also shows that our approach more effectively transfers domain knowledge across different languages and language information across different domains.},
}
Markdown (Informal)
[m^4 Adapter: Multilingual Multi-Domain Adaptation for Machine Translation with a Meta-Adapter](https://aclanthology.org/2022.findings-emnlp.315/) (Lai et al., Findings of EMNLP 2022)
ACL