@inproceedings{samih-kallmeyer-2023-unsupervised,
  title     = {Unsupervised Semantic Frame Induction Revisited},
  author    = {Samih, Younes and
               Kallmeyer, Laura},
  editor    = {Amblard, Maxime and
               Breitholtz, Ellen},
  booktitle = {Proceedings of the 15th International Conference on Computational Semantics},
  month     = jun,
  year      = {2023},
  address   = {Nancy, France},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2023.iwcs-1.10/},
  pages     = {89--93},
  abstract  = {This paper addresses the task of semantic frame induction based on pre-trained language models (LMs). The current state of the art is to directly use contextualized embeddings from models such as {BERT} and to cluster them in a two step clustering process (first lemma-internal, then over all verb tokens in the data set). We propose not to use the LM's embeddings as such but rather to refine them via some transformer-based denoising autoencoder. The resulting embeddings allow to obtain competitive results while clustering them in a single pass. This shows clearly that the autoencoder allows to already concentrate on the information that is relevant for distinguishing event types.},
}
Markdown (Informal)
[Unsupervised Semantic Frame Induction Revisited](https://aclanthology.org/2023.iwcs-1.10/) (Samih & Kallmeyer, IWCS 2023)
ACL
- Younes Samih and Laura Kallmeyer. 2023. Unsupervised Semantic Frame Induction Revisited. In Proceedings of the 15th International Conference on Computational Semantics, pages 89–93, Nancy, France. Association for Computational Linguistics.