% SemEval-2020 Task 6 system-description paper (Subtask 1: definition classification).
% NOTE(review): url normalised from a temporary Anthology preview build
% ("preview.aclanthology.org/add-emnlp-2024-awards/...") to the stable canonical
% resolver; the DOI resolves to the same record.
@inproceedings{jeawak-etal-2020-cardiff,
    title     = {{Cardiff} {University} at {SemEval}-2020 Task 6: Fine-tuning {BERT} for Domain-Specific Definition Classification},
    author    = {Jeawak, Shelan and
                 Espinosa-Anke, Luis and
                 Schockaert, Steven},
    editor    = {Herbelot, Aurelie and
                 Zhu, Xiaodan and
                 Palmer, Alexis and
                 Schneider, Nathan and
                 May, Jonathan and
                 Shutova, Ekaterina},
    booktitle = {Proceedings of the Fourteenth Workshop on Semantic Evaluation},
    month     = dec,
    year      = {2020},
    address   = {Barcelona (online)},
    publisher = {International Committee for Computational Linguistics},
    url       = {https://aclanthology.org/2020.semeval-1.44/},
    doi       = {10.18653/v1/2020.semeval-1.44},
    pages     = {361--366},
    abstract  = {We describe the system submitted to SemEval-2020 Task 6, Subtask 1. The aim of this subtask is to predict whether a given sentence contains a definition or not. Unsurprisingly, we found that strong results can be achieved by fine-tuning a pre-trained BERT language model. In this paper, we analyze the performance of this strategy. Among others, we show that results can be improved by using a two-step fine-tuning process, in which the BERT model is first fine-tuned on the full training set, and then further specialized towards a target domain.},
}
Markdown (Informal)
[Cardiff University at SemEval-2020 Task 6: Fine-tuning BERT for Domain-Specific Definition Classification](https://aclanthology.org/2020.semeval-1.44/) (Jeawak et al., SemEval 2020)
ACL