@inproceedings{zhao-etal-2021-effective,
title = "Effective Distant Supervision for Temporal Relation Extraction",
author = "Zhao, Xinyu and
Lin, Shih-Ting and
Durrett, Greg",
editor = "Ben-David, Eyal and
Cohen, Shay and
McDonald, Ryan and
Plank, Barbara and
Reichart, Roi and
Rotman, Guy and
Ziser, Yftah",
booktitle = "Proceedings of the Second Workshop on Domain Adaptation for NLP",
month = apr,
year = "2021",
address = "Kyiv, Ukraine",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.adaptnlp-1.20/",
pages = "195--203",
abstract = "A principal barrier to training temporal relation extraction models in new domains is the lack of varied, high-quality examples and the challenge of collecting more. We present a method of automatically collecting distantly-supervised examples of temporal relations. We scrape and automatically label event pairs where the temporal relations are made explicit in text, then mask out those explicit cues, forcing a model trained on this data to learn other signals. We demonstrate that a pre-trained Transformer model is able to transfer from the weakly labeled examples to human-annotated benchmarks in both zero-shot and few-shot settings, and that the masking scheme is important in improving generalization."
}
Markdown (Informal)
[Effective Distant Supervision for Temporal Relation Extraction](https://aclanthology.org/2021.adaptnlp-1.20/) (Zhao et al., AdaptNLP 2021)
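The abstract's core idea — label event pairs by their explicit temporal cue, then mask that cue so a trained model must rely on other signals — can be illustrated with a minimal sketch. This is not the paper's actual pipeline: the cue inventory, the `make_distant_example` helper, and the relation labels below are hypothetical stand-ins for illustration only.

```python
import re

# Illustrative inventory of explicit temporal connectives mapped to the
# relation they imply between the first and second event. (Hypothetical;
# the paper's actual cue set and label scheme may differ.)
CUES = {
    "before": "BEFORE",
    "after": "AFTER",
    "then": "BEFORE",  # "X, then Y" implies X happens before Y
}

MASK = "[MASK]"


def make_distant_example(sentence: str):
    """Weakly label a sentence by its explicit temporal cue, then mask the
    cue out, so a model trained on this example must learn other signals."""
    for cue, label in CUES.items():
        pattern = re.compile(rf"\b{re.escape(cue)}\b", flags=re.IGNORECASE)
        if pattern.search(sentence):
            masked = pattern.sub(MASK, sentence, count=1)
            return masked, label
    return None  # no explicit cue: the sentence yields no distant example


if __name__ == "__main__":
    print(make_distant_example("She finished the report before she left the office."))
    # ('She finished the report [MASK] she left the office.', 'BEFORE')
```

Under this sketch, the masked sentence and the cue-derived label form one distantly-supervised training pair; because the cue itself is hidden, the label cannot be recovered by memorizing the connective.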