@inproceedings{wang-etal-2021-phrase,
title = "Phrase-{BERT}: Improved Phrase Embeddings from {BERT} with an Application to Corpus Exploration",
author = "Wang, Shufan and
Thompson, Laure and
Iyyer, Mohit",
editor = "Moens, Marie-Francine and
Huang, Xuanjing and
Specia, Lucia and
Yih, Scott Wen-tau",
booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing",
month = nov,
year = "2021",
address = "Online and Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/fix-sig-urls/2021.emnlp-main.846/",
doi = "10.18653/v1/2021.emnlp-main.846",
pages = "10837--10851",
abstract = "Phrase representations derived from BERT often do not exhibit complex phrasal compositionality, as the model relies instead on lexical similarity to determine semantic relatedness. In this paper, we propose a contrastive fine-tuning objective that enables BERT to produce more powerful phrase embeddings. Our approach (Phrase-BERT) relies on a dataset of diverse phrasal paraphrases, which is automatically generated using a paraphrase generation model, as well as a large-scale dataset of phrases in context mined from the Books3 corpus. Phrase-BERT outperforms baselines across a variety of phrase-level similarity tasks, while also demonstrating increased lexical diversity between nearest neighbors in the vector space. Finally, as a case study, we show that Phrase-BERT embeddings can be easily integrated with a simple autoencoder to build a phrase-based neural topic model that interprets topics as mixtures of words and phrases by performing a nearest neighbor search in the embedding space. Crowdsourced evaluations demonstrate that this phrase-based topic model produces more coherent and meaningful topics than baseline word and phrase-level topic models, further validating the utility of Phrase-BERT."
}
Markdown (Informal)
[Phrase-BERT: Improved Phrase Embeddings from BERT with an Application to Corpus Exploration](https://aclanthology.org/2021.emnlp-main.846/) (Wang et al., EMNLP 2021)
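
As a rough illustration of the nearest-neighbor usage described in the abstract, the sketch below embeds a few phrases with a sentence-transformers encoder and retrieves each phrase's nearest neighbor by cosine similarity. This is not the authors' released code: the model name is a generic stand-in (swap in a Phrase-BERT checkpoint if one is available in your environment), and the phrase list is invented for the example.

```python
# Minimal sketch (assumptions noted above): embed phrases and find
# nearest neighbors by cosine similarity, mirroring the corpus-exploration
# use case the abstract describes.
from sentence_transformers import SentenceTransformer
from sklearn.metrics.pairwise import cosine_similarity

phrases = [
    "machine translation",
    "neural machine translation",
    "statistical language model",
    "topic model",
]

# Stand-in encoder; a Phrase-BERT checkpoint would be substituted here.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
embeddings = model.encode(phrases)  # shape: (len(phrases), dim)

# For each phrase, report its nearest neighbor (excluding itself).
sims = cosine_similarity(embeddings)
for i, phrase in enumerate(phrases):
    sims[i, i] = -1.0  # mask self-similarity
    j = sims[i].argmax()
    print(f"{phrase!r} -> nearest neighbor: {phrases[j]!r} (cos={sims[i, j]:.2f})")
```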