@inproceedings{inoue-etal-2021-modeling,
title = "Modeling Text using the Continuous Space Topic Model with Pre-Trained Word Embeddings",
author = "Inoue, Seiichi and
Aida, Taichi and
Komachi, Mamoru and
Asai, Manabu",
editor = "Kabbara, Jad and
Lin, Haitao and
Paullada, Amandalynne and
Vamvas, Jannis",
booktitle = "Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: Student Research Workshop",
month = aug,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://preview.aclanthology.org/Add-Cong-Liu-Florida-Atlantic-University-author-id/2021.acl-srw.15/",
doi = "10.18653/v1/2021.acl-srw.15",
pages = "138--147",
abstract = "In this study, we propose a model that extends the continuous space topic model (CSTM), which flexibly controls word probability in a document, using pre-trained word embeddings. To develop the proposed model, we pre-train word embeddings, which capture the semantics of words and plug them into the CSTM. Intrinsic experimental results show that the proposed model exhibits a superior performance over the CSTM in terms of perplexity and convergence speed. Furthermore, extrinsic experimental results show that the proposed model is useful for a document classification task when compared with the baseline model. We qualitatively show that the latent coordinates obtained by training the proposed model are better than those of the baseline model."
}
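
The abstract only names the modeling idea, so here is a minimal illustrative sketch of how frozen pre-trained embeddings can drive a CSTM-style document-specific word distribution. It assumes an exponential-tilt parameterization, p(w|d) proportional to p0(w) * exp(<e_w, u_d>), with hypothetical toy values for the vocabulary, embeddings, and base measure; the paper's exact estimator and training procedure may differ.

```python
import numpy as np

# Hypothetical toy setup: a 5-word vocabulary with "pre-trained"
# embeddings (in the paper these come from an external corpus;
# here random vectors stand in for them and are kept frozen).
rng = np.random.default_rng(0)
vocab = ["market", "stock", "goal", "match", "team"]
emb_dim = 8
pretrained_emb = rng.normal(size=(len(vocab), emb_dim))

# Unigram base measure p0(w), e.g. empirical corpus frequencies.
p0 = np.array([0.25, 0.2, 0.2, 0.2, 0.15])

def word_probs(doc_coord: np.ndarray) -> np.ndarray:
    """CSTM-style document-specific word distribution:
    p(w|d) proportional to p0(w) * exp(<emb_w, u_d>),
    where u_d is the latent coordinate of document d.
    (A sketch of the general idea, not the paper's exact model.)
    """
    logits = np.log(p0) + pretrained_emb @ doc_coord
    logits -= logits.max()          # numerical stability
    probs = np.exp(logits)
    return probs / probs.sum()

# The latent coordinate would be inferred during training; a fixed
# draw suffices to show how it reweights the unigram base measure.
u_d = rng.normal(scale=0.5, size=emb_dim)
print(dict(zip(vocab, word_probs(u_d).round(3))))
```

With u_d at the origin the distribution reduces to the base measure p0; moving the document coordinate tilts probability mass toward words whose embeddings align with it, which is the sense in which the model "flexibly controls word probability in a document."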