@inproceedings{wang-etal-2024-multi-pass,
    title     = {Multi-pass Decoding for Grammatical Error Correction},
    author    = {Wang, Xiaoying and
                 Mu, Lingling and
                 Zhang, Jingyi and
                 Xu, Hongfei},
    editor    = {Al-Onaizan, Yaser and
                 Bansal, Mohit and
                 Chen, Yun-Nung},
    booktitle = {Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing},
    month     = nov,
    year      = {2024},
    address   = {Miami, Florida, USA},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/2024.emnlp-main.553/},
    doi       = {10.18653/v1/2024.emnlp-main.553},
    pages     = {9904--9916},
    abstract  = {Sequence-to-sequence (seq2seq) models achieve comparable or better grammatical error correction performance compared to sequence-to-edit (seq2edit) models. Seq2edit models normally iteratively refine the correction result, while seq2seq models decode only once without aware of subsequent tokens. Iteratively refining the correction results of seq2seq models via Multi-Pass Decoding (MPD) may lead to better performance. However, MPD increases the inference costs. Deleting or replacing corrections in previous rounds may lose useful information in the source input. We present an early-stop mechanism to alleviate the efficiency issue. To address the source information loss issue, we propose to merge the source input with the previous round correction result into one sequence. Experiments on the CoNLL-14 test set and BEA-19 test set show that our approach can lead to consistent and significant improvements over strong BART and T5 baselines (+1.80, +1.35, and +2.02 F0.5 for BART 12-2, large and T5 large respectively on CoNLL-14 and +2.99, +1.82, and +2.79 correspondingly on BEA-19), obtaining F0.5 scores of 68.41 and 75.36 on CoNLL-14 and BEA-19 respectively.},
}
Markdown (Informal)
[Multi-pass Decoding for Grammatical Error Correction](https://aclanthology.org/2024.emnlp-main.553/) (Wang et al., EMNLP 2024)
ACL
- Xiaoying Wang, Lingling Mu, Jingyi Zhang, and Hongfei Xu. 2024. Multi-pass Decoding for Grammatical Error Correction. In Proceedings of the 2024 Conference on Empirical Methods in Natural Language Processing, pages 9904–9916, Miami, Florida, USA. Association for Computational Linguistics.