@inproceedings{tan-etal-2022-msp,
title = "{MSP}: Multi-Stage Prompting for Making Pre-trained Language Models Better Translators",
author = "Tan, Zhixing and
Zhang, Xiangwen and
Wang, Shuo and
Liu, Yang",
editor = "Muresan, Smaranda and
Nakov, Preslav and
Villavicencio, Aline",
booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
month = may,
year = "2022",
address = "Dublin, Ireland",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2022.acl-long.424/",
doi = "10.18653/v1/2022.acl-long.424",
pages = "6131--6142",
    abstract = "Prompting has recently been shown to be a promising approach for applying pre-trained language models to downstream tasks. We present Multi-Stage Prompting (MSP), a simple and automatic approach for adapting pre-trained language models to translation tasks. To better mitigate the discrepancy between pre-training and translation, MSP divides translation with pre-trained language models into three separate stages: the encoding stage, the re-encoding stage, and the decoding stage. At each stage, we independently apply a different continuous prompt, allowing the pre-trained language model to better shift to the translation task. We conduct extensive experiments on three translation tasks. Experiments show that our method can significantly improve the translation performance of pre-trained language models."
}
[MSP: Multi-Stage Prompting for Making Pre-trained Language Models Better Translators](https://aclanthology.org/2022.acl-long.424/) (Tan et al., ACL 2022)
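
For a concrete picture of the three-stage idea described in the abstract, here is a minimal, hypothetical PyTorch sketch. It is not the authors' code: the paper applies deep continuous prompts to a frozen GPT-style language model and passes hidden states between stages, whereas this sketch simplifies to input-level prompts around a generic frozen backbone. All names (`MultiStagePrompting`, `backbone`, `prompt_len`) are illustrative assumptions.

```python
import torch
import torch.nn as nn


class MultiStagePrompting(nn.Module):
    """Three independent continuous prompts wrapped around a frozen backbone.

    `backbone` stands in for the frozen pre-trained LM: any module mapping
    (batch, seq, d_model) states to (batch, seq, d_model) states.
    Only the three prompt tensors receive gradients during training.
    """

    def __init__(self, backbone: nn.Module, d_model: int, prompt_len: int = 20):
        super().__init__()
        self.backbone = backbone
        for p in self.backbone.parameters():  # freeze the pre-trained model
            p.requires_grad_(False)
        # One independent continuous prompt per stage, as in the abstract.
        self.enc_prompt = nn.Parameter(torch.randn(prompt_len, d_model) * 0.02)
        self.re_prompt = nn.Parameter(torch.randn(prompt_len, d_model) * 0.02)
        self.dec_prompt = nn.Parameter(torch.randn(prompt_len, d_model) * 0.02)

    def _stage(self, prompt: torch.Tensor, states: torch.Tensor) -> torch.Tensor:
        # Prepend this stage's prompt to the states, then run the frozen backbone.
        batch = states.size(0)
        prefixed = torch.cat([prompt.expand(batch, -1, -1), states], dim=1)
        return self.backbone(prefixed)

    def forward(self, src_states: torch.Tensor, tgt_states: torch.Tensor) -> torch.Tensor:
        h = self._stage(self.enc_prompt, src_states)  # 1) encoding stage
        h = self._stage(self.re_prompt, h)            # 2) re-encoding stage
        # 3) decoding stage: target prefix conditioned on the re-encoded source.
        # Prompt positions are kept in the output for simplicity; a real
        # implementation would slice them off before the LM head.
        return self._stage(self.dec_prompt, torch.cat([h, tgt_states], dim=1))


# Toy usage with a small Transformer standing in for the frozen LM.
lm = nn.TransformerEncoder(
    nn.TransformerEncoderLayer(d_model=64, nhead=4, batch_first=True),
    num_layers=2,
)
msp = MultiStagePrompting(lm, d_model=64, prompt_len=8)
out = msp(torch.randn(2, 10, 64), torch.randn(2, 7, 64))
print(out.shape)  # torch.Size([2, 41, 64]): 3 prompts + source + target positions
```

The key design point the sketch preserves is that each stage gets its own trainable continuous prompt while the backbone stays frozen, so only `3 * prompt_len * d_model` parameters are tuned per language pair.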