@inproceedings{you-etal-2024-efficient,
title = "Efficient Domain Adaptation for {Non-Autoregressive} Machine Translation",
author = "You, WangJie and
Guo, Pei and
Li, Juntao and
Chen, Kehai and
Zhang, Min",
editor = "Ku, Lun-Wei and
Martins, Andre and
Srikumar, Vivek",
booktitle = "Findings of the Association for Computational Linguistics: {ACL} 2024",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2024.findings-acl.810/",
doi = "10.18653/v1/2024.findings-acl.810",
pages = "13657--13670",
abstract = "Domain adaptation remains a challenge in the realm of Neural Machine Translation (NMT), even in the era of large language models (LLMs). Existing non-parametric approaches like nearest neighbor machine translation have made small Autoregressive Translation (AT) models achieve efficient domain generalization and adaptation without updating parameters, but leaving the Non-Autoregressive Translation (NAT) counterparts under-explored. To fill this blank, we introduce $Bi$-$k$NN, an innovative and efficient domain adaptation approach for NAT models that tailors a k-nearest-neighbor algorithm for NAT. Specifically, we introduce an effective datastore construction and correlated updating strategies to conform the parallel nature of NAT. Additionally, we train a meta-network that seamlessly integrates the $k$NN distribution with the NMT distribution robustly during the iterative decoding process of NAT. Our experimental results across four benchmark datasets demonstrate that our $Bi$-$k$NN not only achieves significant improvements over the Base-NAT model (7.8 BLEU on average) but also exhibits enhanced efficiency."
}
Markdown (Informal)
[Efficient Domain Adaptation for Non-Autoregressive Machine Translation](https://aclanthology.org/2024.findings-acl.810/) (You et al., Findings 2024)
ACL