@inproceedings{bao-qiao-2019-transfer,
  author    = {Bao, Xingce and Qiao, Qianqian},
  editor    = {Costa-juss{\`a}, Marta R. and Hardmeier, Christian and Radford, Will and Webster, Kellie},
  title     = {Transfer Learning from Pre-trained {BERT} for Pronoun Resolution},
  booktitle = {Proceedings of the First Workshop on Gender Bias in Natural Language Processing},
  month     = aug,
  year      = {2019},
  address   = {Florence, Italy},
  publisher = {Association for Computational Linguistics},
  url       = {https://preview.aclanthology.org/jlcl-multiple-ingestion/W19-3812/},
  doi       = {10.18653/v1/W19-3812},
  pages     = {82--88},
  abstract  = {The paper describes the submission of the team {\textquotedblleft}We used bert!{\textquotedblright} to the shared task Gendered Pronoun Resolution (Pair pronouns to their correct entities). Our final submission model based on the fine-tuned BERT (Bidirectional Encoder Representations from Transformers) ranks 14th among 838 teams with a multi-class logarithmic loss of 0.208. In this work, contribution of transfer learning technique to pronoun resolution systems is investigated and the gender bias contained in classification models is evaluated.},
}
@comment{Markdown (Informal) citation, from the ACL Anthology:
[Transfer Learning from Pre-trained BERT for Pronoun Resolution](https://preview.aclanthology.org/jlcl-multiple-ingestion/W19-3812/) (Bao & Qiao, GeBNLP 2019)}