@inproceedings{lai-etal-2022-multilingual,
title = "Multilingual Pre-training with Language and Task Adaptation for Multilingual Text Style Transfer",
author = "Lai, Huiyuan and
Toral, Antonio and
Nissim, Malvina",
editor = "Muresan, Smaranda and
Nakov, Preslav and
Villavicencio, Aline",
booktitle = "Proceedings of the 60th Annual Meeting of the Association for Computational Linguistics (Volume 2: Short Papers)",
month = may,
year = "2022",
address = "Dublin, Ireland",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2022.acl-short.29/",
doi = "10.18653/v1/2022.acl-short.29",
pages = "262--271",
abstract = "We exploit the pre-trained seq2seq model mBART for multilingual text style transfer. Using machine translated data as well as gold aligned English sentences yields state-of-the-art results in the three target languages we consider. Besides, in view of the general scarcity of parallel data, we propose a modular approach for multilingual formality transfer, which consists of two training strategies that target adaptation to both language and task. Our approach achieves competitive performance without monolingual task-specific parallel data and can be applied to other style transfer tasks as well as to other languages."
}