@inproceedings{ananthram-etal-2020-event,
    title     = {Event-Guided Denoising for Multilingual Relation Learning},
    author    = {Ananthram, Amith and
                 Allaway, Emily and
                 McKeown, Kathleen},
    editor    = {Scott, Donia and
                 Bel, Nuria and
                 Zong, Chengqing},
    booktitle = {Proceedings of the 28th International Conference on Computational Linguistics},
    month     = dec,
    year      = {2020},
    address   = {Barcelona, Spain (Online)},
    publisher = {International Committee on Computational Linguistics},
    url       = {https://aclanthology.org/2020.coling-main.131/},
    doi       = {10.18653/v1/2020.coling-main.131},
    pages     = {1505--1512},
    abstract  = {General purpose relation extraction has recently seen considerable gains in part due to a massively data-intensive distant supervision technique from Soares et al. (2019) that produces state-of-the-art results across many benchmarks. In this work, we present a methodology for collecting high quality training data for relation extraction from unlabeled text that achieves a near-recreation of their zero-shot and few-shot results at a fraction of the training cost. Our approach exploits the predictable distributional structure of date-marked news articles to build a denoised corpus {--} the extraction process filters out low quality examples. We show that a smaller multilingual encoder trained on this corpus performs comparably to the current state-of-the-art (when both receive little to no fine-tuning) on few-shot and standard relation benchmarks in English and Spanish despite using many fewer examples (50k vs. 300mil+).},
}
Markdown (Informal)
[Event-Guided Denoising for Multilingual Relation Learning](https://aclanthology.org/2020.coling-main.131/) (Ananthram et al., COLING 2020)
ACL
Amith Ananthram, Emily Allaway, and Kathleen McKeown. 2020. Event-Guided Denoising for Multilingual Relation Learning. In Proceedings of the 28th International Conference on Computational Linguistics, pages 1505–1512, Barcelona, Spain (Online). International Committee on Computational Linguistics.