@inproceedings{held-etal-2021-focus,
  author    = {Held, William and
               Iter, Dan and
               Jurafsky, Dan},
  title     = {Focus on what matters: Applying {Discourse Coherence Theory} to {Cross Document Coreference}},
  editor    = {Moens, Marie-Francine and
               Huang, Xuanjing and
               Specia, Lucia and
               Yih, Scott Wen-tau},
  booktitle = {Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing},
  month     = nov,
  year      = {2021},
  address   = {Online and Punta Cana, Dominican Republic},
  publisher = {Association for Computational Linguistics},
  pages     = {1406--1417},
  doi       = {10.18653/v1/2021.emnlp-main.106},
  url       = {https://aclanthology.org/2021.emnlp-main.106/},
  abstract  = {Performing event and entity coreference resolution across documents vastly increases the number of candidate mentions, making it intractable to do the full {$n^2$} pairwise comparisons. Existing approaches simplify by considering coreference only within document clusters, but this fails to handle inter-cluster coreference, common in many applications. As a result cross-document coreference algorithms are rarely applied to downstream tasks. We draw on an insight from discourse coherence theory: potential coreferences are constrained by the reader{'}s discourse focus. We model the entities/events in a reader{'}s focus as a neighborhood within a learned latent embedding space which minimizes the distance between mentions and the centroids of their gold coreference clusters. We then use these neighborhoods to sample only hard negatives to train a fine-grained classifier on mention pairs and their local discourse features. Our approach achieves state-of-the-art results for both events and entities on the ECB+, Gun Violence, Football Coreference, and Cross-Domain Cross-Document Coreference corpora. Furthermore, training on multiple corpora improves average performance across all datasets by 17.2 F1 points, leading to a robust coreference resolution model that is now feasible to apply to downstream tasks.},
}
@comment{
  Markdown (Informal)
  [Focus on what matters: Applying Discourse Coherence Theory to Cross Document Coreference](https://aclanthology.org/2021.emnlp-main.106/) (Held et al., EMNLP 2021)
  ACL
}