@inproceedings{tang-etal-2023-learning,
  author    = {Tang, Xiaohang and
               Zhou, Yi and
               Bollegala, Danushka},
  editor    = {Rogers, Anna and
               Boyd-Graber, Jordan and
               Okazaki, Naoaki},
  title     = {Learning Dynamic Contextualised Word Embeddings via Template-based Temporal Adaptation},
  booktitle = {Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)},
  month     = jul,
  year      = {2023},
  address   = {Toronto, Canada},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2023.acl-long.520/},
  doi       = {10.18653/v1/2023.acl-long.520},
  pages     = {9352--9369},
  abstract  = {Dynamic contextualised word embeddings (DCWEs) represent the temporal semantic variations of words. We propose a method for learning DCWEs by time-adapting a pretrained Masked Language Model (MLM) using time-sensitive templates. Given two snapshots $C_1$ and $C_2$ of a corpus taken respectively at two distinct timestamps $T_1$ and $T_2$, we first propose an unsupervised method to select (a) \textit{pivot} terms related to both $C_1$ and $C_2$, and (b) \textit{anchor} terms that are associated with a specific pivot term in each individual snapshot. We then generate prompts by filling manually compiled templates using the extracted pivot and anchor terms. Moreover, we propose an automatic method to learn time-sensitive templates from $C_1$ and $C_2$, without requiring any human supervision. Next, we use the generated prompts to adapt a pretrained MLM to $T_2$ by fine-tuning using those prompts. Multiple experiments show that our proposed method significantly reduces the perplexity of test sentences in $C_2$, outperforming the current state-of-the-art.},
}
Markdown (Informal)
[Learning Dynamic Contextualised Word Embeddings via Template-based Temporal Adaptation](https://aclanthology.org/2023.acl-long.520/) (Tang et al., ACL 2023)
ACL