@inproceedings{han-etal-2021-econet,
title = "{ECONET}: Effective Continual Pretraining of Language Models for Event Temporal Reasoning",
author = "Han, Rujun and
Ren, Xiang and
Peng, Nanyun",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2021",
address = "Online and Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.emnlp-main.436/",
doi = "10.18653/v1/2021.emnlp-main.436",
pages = "5367--5380",
    abstract = "While pre-trained language models (PTLMs) have achieved noticeable success on many NLP tasks, they still struggle for tasks that require event temporal reasoning, which is essential for event-centric applications. We present a continual pre-training approach that equips PTLMs with targeted knowledge about event temporal relations. We design self-supervised learning objectives to recover masked-out event and temporal indicators and to discriminate sentences from their corrupted counterparts (where event or temporal indicators are replaced). By further pre-training a PTLM with these objectives jointly, we reinforce its attention to event and temporal information, yielding enhanced capability on event temporal reasoning. This Effective CONtinual pre-training framework for Event Temporal reasoning (ECONET) improves the PTLMs' fine-tuning performance across five relation extraction and question answering tasks and achieves new or on-par state-of-the-art performance on most of our downstream tasks."
}
Markdown (Informal)
[ECONET: Effective Continual Pretraining of Language Models for Event Temporal Reasoning](https://aclanthology.org/2021.emnlp-main.436/) (Han et al., EMNLP 2021)