@inproceedings{jin-etal-2021-instance,
  title     = {Instance-Adaptive Training with Noise-Robust Losses against Noisy Labels},
  author    = {Jin, Lifeng and
               Song, Linfeng and
               Xu, Kun and
               Yu, Dong},
  editor    = {Moens, Marie-Francine and
               Huang, Xuanjing and
               Specia, Lucia and
               Yih, Scott Wen-tau},
  booktitle = {Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing},
  month     = nov,
  year      = {2021},
  address   = {Online and Punta Cana, Dominican Republic},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2021.emnlp-main.457/},
  doi       = {10.18653/v1/2021.emnlp-main.457},
  pages     = {5647--5663},
  abstract  = {In order to alleviate the huge demand for annotated datasets for different tasks, many recent natural language processing datasets have adopted automated pipelines for fast-tracking usable data. However, model training with such datasets poses a challenge because popular optimization objectives are not robust to label noise induced in the annotation generation process. Several noise-robust losses have been proposed and evaluated on tasks in computer vision, but they generally use a single dataset-wise hyperparameter to control the strength of noise resistance. This work proposes novel instance-adaptive training frameworks to change single dataset-wise hyperparameters of noise resistance in such losses to be instance-wise. Such instance-wise noise resistance hyperparameters are predicted by special instance-level label quality predictors, which are trained along with the main classification models. Experiments on noisy and corrupted NLP datasets show that proposed instance-adaptive training frameworks help increase the noise-robustness provided by such losses, promoting the use of the frameworks and associated losses in NLP models trained with noisy data.},
}
Markdown (Informal)
[Instance-adaptive training with noise-robust losses against noisy labels](https://aclanthology.org/2021.emnlp-main.457/) (Jin et al., EMNLP 2021)
ACL