@inproceedings{fan-strube-2025-consistent,
    title = "Consistent Discourse-level Temporal Relation Extraction Using Large Language Models",
    author = "Fan, Yi and
      Strube, Michael",
    editor = "Christodoulopoulos, Christos and
      Chakraborty, Tanmoy and
      Rose, Carolyn and
      Peng, Violet",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2025",
    month = nov,
    year = "2025",
    address = "Suzhou, China",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2025.findings-emnlp.1010/",
    doi = "10.18653/v1/2025.findings-emnlp.1010",
    pages = "18605--18622",
    isbn = "979-8-89176-335-7",
    abstract = "Understanding temporal relations between events in a text is essential for determining its temporal structure. Recent advancements in large language models (LLMs) have spurred research on temporal relation extraction. However, LLMs perform poorly in zero-shot and few-shot settings, often underperforming smaller fine-tuned models. Despite these limitations, little attention has been given to improving LLMs in temporal structure extraction tasks. This study systematically examines LLMs' ability to extract and infer discourse-level temporal relations, identifying factors influencing their reasoning and extraction capabilities, including input context, reasoning process and ensuring consistency. We propose a three-step framework to improve LLMs' temporal relation extraction capabilities: context selection, prompts inspired by Allen{'}s interval algebra (Allen, 1983), and reflection-based consistency learning (Shinn et al., 2024). Our results show the effectiveness of our method in guiding LLMs towards structured processing of temporal structure in discourse."
}
Markdown (Informal)
[Consistent Discourse-level Temporal Relation Extraction Using Large Language Models](https://aclanthology.org/2025.findings-emnlp.1010/) (Fan & Strube, Findings 2025)
ACL