@inproceedings{lin-etal-2023-self,
title = "Self-distilled Transitive Instance Weighting for Denoised Distantly Supervised Relation Extraction",
author = "Lin, Xiangyu and
Jia, Weijia and
Gong, Zhiguo",
editor = "Bouamor, Houda and
Pino, Juan and
Bali, Kalika",
booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2023",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.findings-emnlp.13/",
doi = "10.18653/v1/2023.findings-emnlp.13",
pages = "168--180",
abstract = "The widespread existence of wrongly labeled instances is a challenge to distantly supervised relation extraction. Most of the previous works are trained in a bag-level setting to alleviate such noise. However, sentence-level training better utilizes the information than bag-level training, as long as combined with effective noise alleviation. In this work, we propose a novel Transitive Instance Weighting mechanism integrated with the self-distilled BERT backbone, utilizing information in the intermediate outputs to generate dynamic instance weights for denoised sentence-level training. By down-weighting wrongly labeled instances and discounting the weights of easy-to-fit ones, our method can effectively tackle wrongly labeled instances and prevent overfitting. Experiments on both held-out and manual datasets indicate that our method achieves state-of-the-art performance and consistent improvements over the baselines."
}
Markdown (Informal)
[Self-distilled Transitive Instance Weighting for Denoised Distantly Supervised Relation Extraction](https://preview.aclanthology.org/jlcl-multiple-ingestion/2023.findings-emnlp.13/) (Lin et al., Findings 2023)
ACL
Xiangyu Lin, Weijia Jia, and Zhiguo Gong. 2023. Self-distilled Transitive Instance Weighting for Denoised Distantly Supervised Relation Extraction. In Findings of the Association for Computational Linguistics: EMNLP 2023, pages 168–180, Singapore. Association for Computational Linguistics.