@inproceedings{sharma-roychowdhury-2019-iit,
  title     = {{IIT}-{KGP} at {COIN} 2019: Using pre-trained Language Models for modeling Machine Comprehension},
  author    = {Sharma, Prakhar and
               Roychowdhury, Sumegh},
  editor    = {Ostermann, Simon and
               Zhang, Sheng and
               Roth, Michael and
               Clark, Peter},
  booktitle = {Proceedings of the First Workshop on Commonsense Inference in Natural Language Processing},
  month     = nov,
  year      = {2019},
  address   = {Hong Kong, China},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/D19-6009/},
  doi       = {10.18653/v1/D19-6009},
  pages     = {80--84},
  abstract  = {In this paper, we describe our system for COIN 2019 Shared Task 1: Commonsense Inference in Everyday Narrations. We show the power of leveraging state-of-the-art pre-trained language models such as BERT(Bidirectional Encoder Representations from Transformers) and XLNet over other Commonsense Knowledge Base Resources such as ConceptNet and NELL for modeling machine comprehension. We used an ensemble of BERT-Large and XLNet-Large. Experimental results show that our model give substantial improvements over the baseline and other systems incorporating knowledge bases. We bagged 2nd position on the final test set leaderboard with an accuracy of 90.5{\%}},
}
Markdown (Informal)
[IIT-KGP at COIN 2019: Using pre-trained Language Models for modeling Machine Comprehension](https://aclanthology.org/D19-6009/) (Sharma & Roychowdhury, 2019)
ACL