@inproceedings{da-2019-jeff,
title = "Jeff Da at {COIN} - Shared Task: {BIG} {MOOD}: Relating Transformers to Explicit Commonsense Knowledge",
author = "Da, Jeff",
editor = "Ostermann, Simon and
Zhang, Sheng and
Roth, Michael and
Clark, Peter",
booktitle = "Proceedings of the First Workshop on Commonsense Inference in Natural Language Processing",
month = nov,
year = "2019",
address = "Hong Kong, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/add-emnlp-2024-awards/D19-6010/",
doi = "10.18653/v1/D19-6010",
pages = "85--92",
abstract = "We introduce a simple yet effective method of integrating contextual embeddings with commonsense graph embeddings, dubbed BERT Infused Graphs: Matching Over Other embeDdings. First, we introduce a preprocessing method to improve the speed of querying knowledge bases. Then, we develop a method of creating knowledge embeddings from each knowledge base. We introduce a method of aligning tokens between two misaligned tokenization methods. Finally, we contribute a method of contextualizing BERT after combining with knowledge base embeddings. We also show BERTs tendency to correct lower accuracy question types. Our model achieves a higher accuracy than BERT, and we score fifth on the official leaderboard of the shared task and score the highest without any additional language model pretraining."
}