@inproceedings{kim-etal-2023-towards,
title = "Towards Formality-Aware Neural Machine Translation by Leveraging Context Information",
author = "Kim, Dohee and
Baek, Yujin and
Yang, Soyoung and
Choo, Jaegul",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2023",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2023.findings-emnlp.494/",
doi = "10.18653/v1/2023.findings-emnlp.494",
pages = "7384--7392",
abstract = "Formality is one of the most important linguistic properties to determine the naturalness of translation. Although a target-side context contains formality-related tokens, the sparsity within the context makes it difficult for context-aware neural machine translation (NMT) models to properly discern them. In this paper, we introduce a novel training method to explicitly inform the NMT model by pinpointing key informative tokens using a formality classifier. Given a target context, the formality classifier guides the model to concentrate on the formality-related tokens within the context. Additionally, we modify the standard cross-entropy loss, especially toward the formality-related tokens obtained from the classifier. Experimental results show that our approaches not only improve overall translation quality but also reflect the appropriate formality from the target context."
}
Markdown (Informal)
[Towards Formality-Aware Neural Machine Translation by Leveraging Context Information](https://aclanthology.org/2023.findings-emnlp.494/) (Kim et al., Findings 2023)
ACL
Dohee Kim, Yujin Baek, Soyoung Yang, and Jaegul Choo. 2023. Towards Formality-Aware Neural Machine Translation by Leveraging Context Information. In Findings of the Association for Computational Linguistics: EMNLP 2023, pages 7384–7392, Singapore. Association for Computational Linguistics.
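
The abstract describes modifying the standard cross-entropy loss so that formality-related target tokens, pinpointed by a formality classifier, receive extra emphasis. Below is a minimal sketch of one way such a token-weighted loss could look; the function name, the `formality_mask` input, and the additive weighting scheme are illustrative assumptions, not the authors' implementation.

```python
# A minimal sketch (not the paper's implementation) of a cross-entropy loss
# that up-weights formality-related target tokens flagged by a classifier.
import torch
import torch.nn.functional as F

def formality_weighted_ce(logits, targets, formality_mask, extra_weight=1.0, pad_id=0):
    """
    logits:         (batch, seq_len, vocab) decoder outputs
    targets:        (batch, seq_len) gold target token ids
    formality_mask: (batch, seq_len) 1.0 where the classifier marks a token
                    as formality-related, 0.0 elsewhere (assumed input)
    extra_weight:   additional weight applied to formality-related tokens
    """
    # Per-token cross-entropy, ignoring padding positions
    ce = F.cross_entropy(
        logits.view(-1, logits.size(-1)),
        targets.view(-1),
        ignore_index=pad_id,
        reduction="none",
    ).view_as(targets).float()

    # Up-weight the tokens the formality classifier pointed at
    weights = 1.0 + extra_weight * formality_mask
    non_pad = (targets != pad_id).float()
    return (ce * weights * non_pad).sum() / non_pad.sum().clamp(min=1.0)

# Example: batch of 2 sentences, 5 tokens each, vocabulary of 100
logits = torch.randn(2, 5, 100, requires_grad=True)
targets = torch.randint(1, 100, (2, 5))
mask = torch.zeros(2, 5)
mask[:, 2] = 1.0  # pretend the classifier flagged the third token
loss = formality_weighted_ce(logits, targets, mask, extra_weight=2.0)
loss.backward()
```

In practice the mask would be produced by running the formality classifier over the target-side context; here it is supplied directly to keep the sketch self-contained.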