@inproceedings{xia-etal-2020-incremental,
title = "Incremental Neural Coreference Resolution in Constant Memory",
author = "Xia, Patrick and
Sedoc, Jo{\~a}o and
Van Durme, Benjamin",
editor = "Webber, Bonnie and
Cohn, Trevor and
He, Yulan and
Liu, Yang",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.emnlp-main.695/",
doi = "10.18653/v1/2020.emnlp-main.695",
pages = "8617--8624",
    abstract = "We investigate modeling coreference resolution under a fixed memory constraint by extending an incremental clustering algorithm to utilize contextualized encoders and neural components. Given a new sentence, our end-to-end algorithm proposes and scores each mention span against explicit entity representations created from the earlier document context (if any). These spans are then used to update the entity's representations before being forgotten; we only retain a fixed set of salient entities throughout the document. In this work, we successfully convert a high-performing model (Joshi et al., 2020), asymptotically reducing its memory usage to constant space with only a 0.3{\%} relative loss in F1 on OntoNotes 5.0."
}
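
For orientation, a minimal sketch of the fixed-budget incremental clustering loop the abstract describes might look like the following. All names, the scoring function, the update rule, and the salience-based eviction policy are illustrative assumptions, not the authors' implementation:

```python
# Hypothetical sketch of constant-memory incremental entity clustering:
# score each new mention span against retained entity representations,
# fold it into the best match (or start a new entity), then keep only a
# fixed set of salient entities. Thresholds and budgets are assumed values.
from dataclasses import dataclass

MAX_ENTITIES = 50            # fixed entity budget (assumed)
NEW_ENTITY_THRESHOLD = 0.5   # score below which a span starts a new entity (assumed)

@dataclass
class Entity:
    representation: list[float]  # running entity embedding
    salience: float = 0.0        # stand-in for a learned/recency salience score

def score(span_emb: list[float], entity: Entity) -> float:
    """Toy compatibility score: dot product, standing in for a learned scorer."""
    return sum(a * b for a, b in zip(span_emb, entity.representation))

def update(entity: Entity, span_emb: list[float]) -> None:
    """Fold the span into the entity representation; the span itself is then forgotten."""
    entity.representation = [
        0.9 * e + 0.1 * s for e, s in zip(entity.representation, span_emb)
    ]
    entity.salience += 1.0

def process_sentence(span_embs: list[list[float]], entities: list[Entity]) -> None:
    """Process one sentence's proposed mention spans against the entity set."""
    for span in span_embs:
        if entities:
            best = max(entities, key=lambda e: score(span, e))
            if score(span, best) >= NEW_ENTITY_THRESHOLD:
                update(best, span)
                continue
        entities.append(Entity(representation=list(span), salience=1.0))
        # Enforce the constant-memory constraint: once the fixed budget
        # is exceeded, evict the least salient entity.
        if len(entities) > MAX_ENTITIES:
            entities.remove(min(entities, key=lambda e: e.salience))
```

Because only the bounded entity set (plus the current sentence) is ever held in memory, the document-level memory footprint stays constant regardless of document length, which is the property the paper trades a 0.3% relative F1 loss for.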