@inproceedings{lopez-zapata-etal-2025-attention,
  title     = {{Attention-Seeker}: Dynamic Self-Attention Scoring for Unsupervised Keyphrase Extraction},
  author    = {Lopez Zapata, Erwin Daniel and
               Tang, Cheng and
               Shimada, Atsushi},
  editor    = {Rambow, Owen and
               Wanner, Leo and
               Apidianaki, Marianna and
               Al-Khalifa, Hend and
               Di Eugenio, Barbara and
               Schockaert, Steven},
  booktitle = {Proceedings of the 31st International Conference on Computational Linguistics},
  month     = jan,
  year      = {2025},
  address   = {Abu Dhabi, UAE},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2025.coling-main.335/},
  pages     = {5011--5026},
  abstract  = {This paper proposes Attention-Seeker, an unsupervised keyphrase extraction method that leverages self-attention maps from a Large Language Model to estimate the importance of candidate phrases. Our approach identifies specific components {--} such as layers, heads, and attention vectors {--} where the model pays significant attention to the key topics of the text. The attention weights provided by these components are then used to score the candidate phrases. Unlike previous models that require manual tuning of parameters (e.g., selection of heads, prompts, hyperparameters), Attention-Seeker dynamically adapts to the input text without any manual adjustments, enhancing its practical applicability. We evaluate Attention-Seeker on four publicly available datasets: Inspec, SemEval2010, SemEval2017, and Krapivin. Our results demonstrate that, even without parameter tuning, Attention-Seeker outperforms most baseline models, achieving state-of-the-art performance on three out of four datasets, particularly excelling in extracting keyphrases from long documents.}
}
Markdown (Informal)
[Attention-Seeker: Dynamic Self-Attention Scoring for Unsupervised Keyphrase Extraction](https://aclanthology.org/2025.coling-main.335/) (Lopez Zapata et al., COLING 2025)
ACL