@inproceedings{padmanabhan-2025-submission,
title = "Submission for {WMT}25 Task 3",
author = "Padmanabhan, Govardhan",
editor = "Haddow, Barry and
Kocmi, Tom and
Koehn, Philipp and
Monz, Christof",
booktitle = "Proceedings of the Tenth Conference on Machine Translation",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.wmt-1.73/",
doi = "10.18653/v1/2025.wmt-1.73",
pages = "984--993",
ISBN = "979-8-89176-341-8",
    abstract = "The paper presents two approaches submitted to the WMT 2025 Automated Translation Quality Evaluation Systems Task 3 - Quality Estimation (QE)-informed Segment-level Error Correction. While jointly training QE systems with Automatic Post-Editing (APE) has shown improved performance for both tasks, APE systems are still known to overcorrect the output of Machine Translation (MT), leading to a degradation in performance. We investigate a simple training-free approach, QE-informed Retranslation, and compare it with a second approach within the same training-free paradigm. Our winning approach selects the highest-quality translation from multiple candidates generated by different LLMs. The second approach, more akin to APE, instructs an LLM to replace error substrings as specified in the provided QE explanation(s). A conditional heuristic was employed to minimise the number of edits, with the aim of maximising the Gain-to-Edit ratio. The two proposed approaches achieved {\ensuremath{\Delta}}COMET scores of 0.0201 and {\ensuremath{-}}0.0108, respectively, with the first approach taking the winning position on the subtask leaderboard."
}
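
As a reading aid only (this is not the authors' released code), the abstract's first, winning approach boils down to QE-based candidate selection: generate translations with several LLMs, score each candidate with a quality-estimation metric, and keep the argmax. A minimal Python sketch follows, assuming a hypothetical qe_score(source, mt) callable standing in for a real reference-free QE model such as COMET-QE; the function name, example strings, and dummy scorer are all illustrative.

    # Minimal sketch of QE-informed Retranslation: pick the candidate
    # translation that a QE scorer rates highest. `qe_score` is a
    # hypothetical stand-in for a real QE model wrapper (assumption).
    from typing import Callable, List

    def qe_informed_retranslation(
        source: str,
        candidates: List[str],
        qe_score: Callable[[str, str], float],
    ) -> str:
        """Return the candidate with the highest QE score for `source`."""
        return max(candidates, key=lambda mt: qe_score(source, mt))

    if __name__ == "__main__":
        # Toy usage: a dummy length-difference scorer stands in for a QE model.
        dummy_qe = lambda src, mt: -abs(len(src) - len(mt))
        print(qe_informed_retranslation(
            "Das ist ein Test.",
            ["This is a test.", "That was a testing.", "Test this is."],
            dummy_qe,
        ))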