@inproceedings{yang-etal-2020-improving,
    title = "Improving Event Duration Prediction via Time-aware Pre-training",
    author = "Yang, Zonglin and
      Du, Xinya and
      Rush, Alexander and
      Cardie, Claire",
    editor = "Cohn, Trevor and
      He, Yulan and
      Liu, Yang",
    booktitle = "Findings of the Association for Computational Linguistics: EMNLP 2020",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.findings-emnlp.302/",
    doi = "10.18653/v1/2020.findings-emnlp.302",
    pages = "3370--3378",
    abstract = "End-to-end models in NLP rarely encode external world knowledge about length of time. We introduce two effective models for duration prediction, which incorporate external knowledge by reading temporal-related news sentences (time-aware pre-training). Specifically, one model predicts the range/unit where the duration value falls in (R-PRED); and the other predicts the exact duration value (E-PRED). Our best model {--} E-PRED, substantially outperforms previous work, and captures duration information more accurately than R-PRED. We also demonstrate our models are capable of duration prediction in the unsupervised setting, outperforming the baselines."
}
Markdown (Informal)
[Improving Event Duration Prediction via Time-aware Pre-training](https://aclanthology.org/2020.findings-emnlp.302/) (Yang et al., Findings 2020)
ACL