@inproceedings{min-2021-exploring,
title = "Exploring Pre-Trained Transformers and Bilingual Transfer Learning for {A}rabic Coreference Resolution",
author = "Min, Bonan",
editor = "Ogrodniczuk, Maciej and
Pradhan, Sameer and
Poesio, Massimo and
Grishina, Yulia and
Ng, Vincent",
booktitle = "Proceedings of the Fourth Workshop on Computational Models of Reference, Anaphora and Coreference",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/2021.crac-1.10/",
doi = "10.18653/v1/2021.crac-1.10",
pages = "94--99",
abstract = "In this paper, we develop bilingual transfer learning approaches to improve Arabic coreference resolution by leveraging additional English annotation via bilingual or multilingual pre-trained transformers. We show that bilingual transfer learning improves the strong transformer-based neural coreference models by 2-4 F1. We also systemically investigate the effectiveness of several pre-trained transformer models that differ in training corpora, languages covered, and model capacity. Our best model achieves a new state-of-the-art performance of 64.55 F1 on the Arabic OntoNotes dataset. Our code is publicly available at \url{https://github.com/bnmin/arabic_coref}."
}
Markdown (Informal)
[Exploring Pre-Trained Transformers and Bilingual Transfer Learning for Arabic Coreference Resolution](https://aclanthology.org/2021.crac-1.10/) (Min, CRAC 2021)
ACL
Bonan Min. 2021. Exploring Pre-Trained Transformers and Bilingual Transfer Learning for Arabic Coreference Resolution. In Proceedings of the Fourth Workshop on Computational Models of Reference, Anaphora and Coreference, pages 94–99, Punta Cana, Dominican Republic. Association for Computational Linguistics.