@inproceedings{aloraini-poesio-2020-anaphoric,
title = "Anaphoric Zero Pronoun Identification: A Multilingual Approach",
author = "Aloraini, Abdulrahman and
Poesio, Massimo",
editor = "Ogrodniczuk, Maciej and
Ng, Vincent and
Grishina, Yulia and
Pradhan, Sameer",
booktitle = "Proceedings of the Third Workshop on Computational Models of Reference, Anaphora and Coreference",
month = dec,
year = "2020",
address = "Barcelona, Spain (online)",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Author-page-Marten-During-lu/2020.crac-1.3/",
pages = "22--32",
abstract = "Pro-drop languages such as Arabic, Chinese, Italian or Japanese allow morphologically null but referential arguments in certain syntactic positions, called anaphoric zero-pronouns. Much NLP work on anaphoric zero-pronouns (AZP) is based on gold mentions, but models for their identification are a fundamental prerequisite for their resolution in real-life applications. Such identification requires complex language understanding and knowledge of real-world entities. Transfer learning models, such as BERT, have recently shown to learn surface, syntactic, and semantic information,which can be very useful in recognizing AZPs. We propose a BERT-based multilingual model for AZP identification from predicted zero pronoun positions, and evaluate it on the Arabic and Chinese portions of OntoNotes 5.0. As far as we know, this is the first neural network model of AZP identification for Arabic; and our approach outperforms the stateof-the-art for Chinese. Experiment results suggest that BERT implicitly encode information about AZPs through their surrounding context."
}
Markdown (Informal)
[Anaphoric Zero Pronoun Identification: A Multilingual Approach](https://aclanthology.org/2020.crac-1.3/) (Aloraini & Poesio, CRAC 2020)
ACL
Abdulrahman Aloraini and Massimo Poesio. 2020. Anaphoric Zero Pronoun Identification: A Multilingual Approach. In Proceedings of the Third Workshop on Computational Models of Reference, Anaphora and Coreference, pages 22–32, Barcelona, Spain (online). Association for Computational Linguistics.
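
To make the abstract's description concrete, below is a minimal, hypothetical sketch of BERT-based AZP identification: a candidate zero-pronoun gap is marked in the input, encoded with multilingual BERT, and classified as anaphoric or not. This is not the authors' implementation; the Hugging Face model name, the use of the [MASK] token as a gap marker, and the untrained linear classification head are illustrative assumptions.

```python
# Hypothetical sketch of BERT-based anaphoric zero pronoun (AZP) identification:
# mark a candidate gap position in the sentence and classify it as AZP / not-AZP.
# NOT the paper's code; the model choice, gap-marking scheme, and classifier head
# are illustrative assumptions.
import torch
from transformers import AutoTokenizer, AutoModel

MODEL_NAME = "bert-base-multilingual-cased"  # multilingual BERT, matching the multilingual setting
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
encoder = AutoModel.from_pretrained(MODEL_NAME)
classifier = torch.nn.Linear(encoder.config.hidden_size, 2)  # AZP vs. non-AZP (untrained here)

def score_candidate(pre_context: str, post_context: str) -> torch.Tensor:
    """Score one candidate gap position, given the text before and after it.

    The gap is represented by the tokenizer's [MASK] token; this marking
    scheme is an assumption made for illustration only.
    """
    text = f"{pre_context} {tokenizer.mask_token} {post_context}"
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():
        hidden = encoder(**inputs).last_hidden_state  # (1, seq_len, hidden_size)
    # Use the contextual embedding of the gap marker as the candidate representation.
    mask_index = (inputs["input_ids"][0] == tokenizer.mask_token_id).nonzero()[0, 0]
    logits = classifier(hidden[0, mask_index])
    return torch.softmax(logits, dim=-1)  # [P(not AZP), P(AZP)]

# Example: an Arabic pro-drop sentence, "He read the book and then (he) went home",
# with the candidate gap before the second verb phrase.
probs = score_candidate("قرأ الكتاب ثم", "ذهب إلى المنزل")
print(probs)
```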