@inproceedings{thillaisundaram-togia-2019-biomedical,
title = "Biomedical relation extraction with pre-trained language representations and minimal task-specific architecture",
author = "Thillaisundaram, Ashok and
Togia, Theodosia",
editor = "Kim, Jin-Dong and
N{\'e}dellec, Claire and
Bossy, Robert and
Del{\'e}ger, Louise",
booktitle = "Proceedings of the 5th Workshop on BioNLP Open Shared Tasks",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/D19-5713/",
doi = "10.18653/v1/D19-5713",
pages = "84--89",
abstract = "This paper presents our participation in the AGAC Track from the 2019 BioNLP Open Shared Tasks. We provide a solution for Task 3, which aims to extract {\textquotedblleft}gene - function change - disease{\textquotedblright} triples, where {\textquotedblleft}gene{\textquotedblright} and {\textquotedblleft}disease{\textquotedblright} are mentions of particular genes and diseases respectively and {\textquotedblleft}function change{\textquotedblright} is one of four pre-defined relationship types. Our system extends BERT (Devlin et al., 2018), a state-of-the-art language model, which learns contextual language representations from a large unlabelled corpus and whose parameters can be fine-tuned to solve specific tasks with minimal additional architecture. We encode the pair of mentions and their textual context as two consecutive sequences in BERT, separated by a special symbol. We then use a single linear layer to classify their relationship into five classes (four pre-defined, as well as {\textquoteleft}no relation{\textquoteright}). Despite considerable class imbalance, our system significantly outperforms a random baseline while relying on an extremely simple setup with no specially engineered features."
}
Markdown (Informal)
[Biomedical relation extraction with pre-trained language representations and minimal task-specific architecture](https://aclanthology.org/D19-5713/) (Thillaisundaram & Togia, BioNLP 2019)
ACL