@inproceedings{homburger-bar-2025-large,
  title = "Large Temporal Models: Unlocking Temporal Understanding in {LLM}s for Temporal Relation Classification",
  author = "Homburger, Omri and
    Bar, Kfir",
  editor = "Inui, Kentaro and
    Sakti, Sakriani and
    Wang, Haofen and
    Wong, Derek F. and
    Bhattacharyya, Pushpak and
    Banerjee, Biplab and
    Ekbal, Asif and
    Chakraborty, Tanmoy and
    Singh, Dhirendra Pratap",
  booktitle = "Proceedings of the 14th International Joint Conference on Natural Language Processing and the 4th Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics",
  month = dec,
  year = "2025",
  address = "Mumbai, India",
  publisher = "The Asian Federation of Natural Language Processing and The Association for Computational Linguistics",
  url = "https://aclanthology.org/2025.ijcnlp-long.117/",
  pages = "2156--2171",
  isbn = "979-8-89176-298-5",
  abstract = "We present Large Temporal Model, a Large Language Model (LLM) that excels in Temporal Relation Classification (TRC). We show how a carefully designed fine-tuning strategy, using a novel two-step fine-tuning approach, can adapt LLMs for TRC. Our approach is focused on global TRC, enabling simultaneous classification of all temporal relations within a document. Unlike traditional pairwise methods, our approach performs global inference in a single step, improving both efficiency and consistency. Evaluations on the MATRES and OmniTemp benchmarks demonstrate that, for the first time, an LLM achieves state-of-the-art performance, outperforming previous pairwise and global TRC methods. Results show that our global approach produces more consistent and accurate temporal graphs. Ablation studies further validate the effectiveness of our two-step fine-tuning strategy, while analyses reveal why our approach succeeds in increasing performance and reducing inconsistencies."
}
Markdown (Informal)
[Large Temporal Models: Unlocking Temporal Understanding in LLMs for Temporal Relation Classification](https://aclanthology.org/2025.ijcnlp-long.117/) (Homburger & Bar, IJCNLP-AACL 2025)
ACL