@inproceedings{ruiter-etal-2020-self,
title = "Self-Induced Curriculum Learning in Self-Supervised Neural Machine Translation",
author = "Ruiter, Dana and
van Genabith, Josef and
Espa{\~n}a-Bonet, Cristina",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2020.emnlp-main.202/",
doi = "10.18653/v1/2020.emnlp-main.202",
pages = "2560--2571",
abstract = "Self-supervised neural machine translation (SSNMT) jointly learns to identify and select suitable training data from comparable (rather than parallel) corpora and to translate, in a way that the two tasks support each other in a virtuous circle. In this study, we provide an in-depth analysis of the sampling choices the SSNMT model makes during training. We show how, without it having been told to do so, the model self-selects samples of increasing (i) complexity and (ii) task-relevance in combination with (iii) performing a denoising curriculum. We observe that the dynamics of the mutual-supervision signals of both system internal representation types are vital for the extraction and translation performance. We show that in terms of the Gunning-Fog Readability index, SSNMT starts extracting and learning from Wikipedia data suitable for high school students and quickly moves towards content suitable for first year undergraduate students."
}