@inproceedings{wang-li-2021-pre,
title = "Pre-training Methods for Neural Machine Translation",
author = "Wang, Mingxuan and
Li, Lei",
editor = "Chiang, David and
Zhang, Min",
booktitle = "Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Tutorial Abstracts",
month = aug,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.acl-tutorials.4/",
doi = "10.18653/v1/2021.acl-tutorials.4",
pages = "21--25",
abstract = "This tutorial provides a comprehensive guide to make the most of pre-training for neural machine translation. Firstly, we will briefly introduce the background of NMT, pre-training methodology, and point out the main challenges when applying pre-training for NMT. Then we will focus on analysing the role of pre-training in enhancing the performance of NMT, how to design a better pre-training model for executing specific NMT tasks and how to better integrate the pre-trained model into NMT system. In each part, we will provide examples, discuss training techniques and analyse what is transferred when applying pre-training."
}
Markdown (Informal)
[Pre-training Methods for Neural Machine Translation](https://aclanthology.org/2021.acl-tutorials.4/) (Wang & Li, ACL-IJCNLP 2021)
ACL
Mingxuan Wang and Lei Li. 2021. Pre-training Methods for Neural Machine Translation. In Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Tutorial Abstracts, pages 21–25, Online. Association for Computational Linguistics.