@inproceedings{kimura-etal-2021-towards-language,
  title     = {Towards a Language Model for Temporal Commonsense Reasoning},
  author    = {Kimura, Mayuko and
               Kanashiro Pereira, Lis and
               Kobayashi, Ichiro},
  editor    = {Djabri, Souhila and
               Gimadi, Dinara and
               Mihaylova, Tsvetomila and
               Nikolova-Koleva, Ivelina},
  booktitle = {Proceedings of the Student Research Workshop Associated with {RANLP} 2021},
  month     = sep,
  year      = {2021},
  address   = {Online},
  publisher = {INCOMA Ltd.},
  url       = {https://preview.aclanthology.org/jlcl-multiple-ingestion/2021.ranlp-srw.12/},
  pages     = {78--84},
  abstract  = {Temporal commonsense reasoning is a challenging task as it requires temporal knowledge usually not explicit in text. In this work, we propose an ensemble model for temporal commonsense reasoning. Our model relies on pre-trained contextual representations from transformer-based language models (i.e., {BERT}), and on a variety of training methods for enhancing model generalization: 1) multi-step fine-tuning using carefully selected auxiliary tasks and datasets, and 2) a specifically designed temporal masked language model task aimed to capture temporal commonsense knowledge. Our model greatly outperforms the standard fine-tuning approach and strong baselines on the {MC-TACO} dataset.},
}
Markdown (Informal)
[Towards a Language Model for Temporal Commonsense Reasoning](https://preview.aclanthology.org/jlcl-multiple-ingestion/2021.ranlp-srw.12/) (Kimura et al., RANLP 2021)
ACL