@inproceedings{lu-zhang-2022-norm,
title = "Norm-based Noisy Corpora Filtering and Refurbishing in Neural Machine Translation",
author = "Lu, Yu and
Zhang, Jiajun",
editor = "Goldberg, Yoav and
Kozareva, Zornitsa and
Zhang, Yue",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2022",
address = "Abu Dhabi, United Arab Emirates",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2022.emnlp-main.363/",
doi = "10.18653/v1/2022.emnlp-main.363",
pages = "5414--5425",
abstract = "Recent advances in neural machine translation depend on massive parallel corpora, which are collected from any open source without much guarantee of quality. It stresses the need for noisy corpora filtering, but existing methods are insufficient to solve this issue. They spend much time ensembling multiple scorers trained on clean bitexts, unavailable for low-resource languages in practice. In this paper, we propose a norm-based noisy corpora filtering and refurbishing method with no external data and costly scorers. The noisy and clean samples are separated based on how much information from the source and target sides the model requires to fit the given translation. For the unparallel sentence, the target-side history translation is much more important than the source context, contrary to the parallel ones. The amount of these two information flows can be measured by norms of source-/target-side context vectors. Moreover, we propose to reuse the discovered noisy data by generating pseudo labels via online knowledge distillation. Extensive experiments show that our proposed filtering method performs comparably with state-of-the-art noisy corpora filtering techniques but is more efficient and easier to operate. Noisy sample refurbishing further enhances the performance by making the most of the given data."
}
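
The filtering criterion sketched in the abstract can be made concrete. Below is a minimal, hypothetical Python/PyTorch illustration of the norm-comparison idea only, not the authors' released code: it assumes per-target-token "source context" vectors (e.g. cross-attention outputs) and "target context" vectors (e.g. decoder self-attention states) have been extracted from a trained NMT model, and it scores a sentence pair by the ratio of their mean L2 norms; the threshold value is likewise an assumption.

# Sketch of norm-based noisy-pair scoring (assumptions: how context
# vectors are obtained and the exact score/threshold are illustrative).
import torch

def norm_ratio_score(src_ctx: torch.Tensor, tgt_ctx: torch.Tensor) -> float:
    """Ratio of mean source-context norm to mean target-context norm.

    src_ctx, tgt_ctx: (tgt_len, d_model) per-target-token context vectors.
    A low ratio means the model leans on target-side history rather than
    the source sentence, which the paper associates with non-parallel pairs.
    """
    src_norm = src_ctx.norm(dim=-1).mean()  # mean L2 norm over target tokens
    tgt_norm = tgt_ctx.norm(dim=-1).mean()
    return (src_norm / tgt_norm).item()

# Toy stand-ins for vectors that would come from a trained NMT model.
torch.manual_seed(0)
parallel_like = norm_ratio_score(torch.randn(12, 512) * 2.0, torch.randn(12, 512))
noisy_like = norm_ratio_score(torch.randn(12, 512) * 0.5, torch.randn(12, 512))

threshold = 1.0  # hypothetical cutoff; the paper's criterion may differ
for name, score in [("parallel-like", parallel_like), ("noisy-like", noisy_like)]:
    verdict = "keep" if score > threshold else "filter/refurbish"
    print(f"{name}: score={score:.2f} -> {verdict}")

In this toy run, the low-ratio pair would be routed to refurbishing, where, per the abstract, the paper generates pseudo labels via online knowledge distillation instead of discarding the pair outright.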