@inproceedings{cengiz-etal-2019-ku,
    title     = {{KU}{\_}ai at {MEDIQA} 2019: Domain-specific Pre-training and Transfer Learning for Medical {NLI}},
    author    = {Cengiz, Cemil and
                 Sert, Ula{\c{s}} and
                 Yuret, Deniz},
    editor    = {Demner-Fushman, Dina and
                 Cohen, Kevin Bretonnel and
                 Ananiadou, Sophia and
                 Tsujii, Junichi},
    booktitle = {Proceedings of the 18th BioNLP Workshop and Shared Task},
    month     = aug,
    year      = {2019},
    address   = {Florence, Italy},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/W19-5045/},
    doi       = {10.18653/v1/W19-5045},
    pages     = {427--436},
    abstract  = {In this paper, we describe our system and results submitted for the Natural Language Inference (NLI) track of the MEDIQA 2019 Shared Task. As KU{\_}ai team, we used BERT as our baseline model and pre-processed the MedNLI dataset to mitigate the negative impact of de-identification artifacts. Moreover, we investigated different pre-training and transfer learning approaches to improve the performance. We show that pre-training the language model on rich biomedical corpora has a significant effect in teaching the model domain-specific language. In addition, training the model on large NLI datasets such as MultiNLI and SNLI helps in learning task-specific reasoning. Finally, we ensembled our highest-performing models, and achieved 84.7{\%} accuracy on the unseen test dataset and ranked 10th out of 17 teams in the official results.},
}
@comment{
Markdown (Informal)
[KU_ai at MEDIQA 2019: Domain-specific Pre-training and Transfer Learning for Medical NLI](https://aclanthology.org/W19-5045/) (Cengiz et al., BioNLP 2019)
ACL
}