@inproceedings{park-etal-2024-coconut,
title = "Coconut: Contextualized Commonsense Unified Transformers for Graph-Based Commonsense Augmentation of Language Models",
author = "Park, Jun-Hyung and
Lee, Mingyu and
Kim, Junho and
Lee, SangKeun",
editor = "Ku, Lun-Wei and
Martins, Andre and
Srikumar, Vivek",
booktitle = "Findings of the Association for Computational Linguistics: ACL 2024",
month = aug,
year = "2024",
address = "Bangkok, Thailand",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2024.findings-acl.346/",
doi = "10.18653/v1/2024.findings-acl.346",
pages = "5815--5830",
abstract = "In this paper, we introduce COCONUT to effectively guide the contextualization of structured commonsense knowledge based on largelanguage models. COCONUT employs a contextualized knowledge prompting scheme to gather high-quality contextualization examplesfrom a large language model. These examples are subsequently distilled into small language models to enhance their contextualization capability. Extensive evaluations show that COCONUT considerably improves commonsense reasoning performance across diverse benchmarks, models, and settings, exhibiting its flexibility and universality in generating contextualized commonsense knowledge. Notably,COCONUT consistently outperforms the state-of-the-art technique by an average of 5.8{\%}."
}