@inproceedings{mehri-etal-2025-discourse,
title = "Discourse Relation Recognition with Language Models Under Different Data Availability",
author = "Mehri, Shuhaib and
Li, Chuyuan and
Carenini, Giuseppe",
editor = "Strube, Michael and
Braud, Chloe and
Hardmeier, Christian and
Li, Junyi Jessy and
Loaiciga, Sharid and
Zeldes, Amir and
Li, Chuyuan",
booktitle = "Proceedings of the 6th Workshop on Computational Approaches to Discourse, Context and Document-Level Inferences (CODI 2025)",
month = nov,
year = "2025",
address = "Suzhou, China",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/author-page-jaejun-lee-kaist/2025.codi-1.13/",
doi = "10.18653/v1/2025.codi-1.13",
pages = "148--156",
ISBN = "979-8-89176-343-2",
abstract = "Large Language Models (LLMs) have demonstrated remarkable performance across various NLP tasks, yet they continue to face challenges in discourse relation recognition (DRR). Current state-of-the-art methods for DRR primarily rely on smaller pre-trained language models (PLMs). In this study, we conduct a comprehensive analysis of different approaches using both PLMs and LLMs, evaluating their effectiveness for DRR at multiple granularities and under different data availability settings. Our findings indicate that no single approach consistently outperforms the others, and we offer a general comparison framework to guide the selection of the most appropriate model based on specific DRR requirements and data conditions."
}

Markdown (Informal)
[Discourse Relation Recognition with Language Models Under Different Data Availability](https://aclanthology.org/2025.codi-1.13/) (Mehri et al., CODI 2025)