@inproceedings{isonuma-etal-2020-tree,
  title     = {{Tree-Structured} {Neural} {Topic} {Model}},
  author    = {Isonuma, Masaru and
               Mori, Junichiro and
               Bollegala, Danushka and
               Sakata, Ichiro},
  editor    = {Jurafsky, Dan and
               Chai, Joyce and
               Schluter, Natalie and
               Tetreault, Joel},
  booktitle = {Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics},
  month     = jul,
  year      = {2020},
  address   = {Online},
  publisher = {Association for Computational Linguistics},
  url       = {https://aclanthology.org/2020.acl-main.73/},
  doi       = {10.18653/v1/2020.acl-main.73},
  pages     = {800--806},
  abstract  = {This paper presents a tree-structured neural topic model, which has a topic distribution over a tree with an infinite number of branches. Our model parameterizes an unbounded ancestral and fraternal topic distribution by applying doubly-recurrent neural networks. With the help of autoencoding variational Bayes, our model improves data scalability and achieves competitive performance when inducing latent topics and tree structures, as compared to a prior tree-structured topic model (Blei et al., 2010). This work extends the tree-structured topic model such that it can be incorporated with neural models for downstream tasks.},
}
Markdown (Informal): [Tree-Structured Neural Topic Model](https://aclanthology.org/2020.acl-main.73/) (Isonuma et al., ACL 2020)
- Masaru Isonuma, Junichiro Mori, Danushka Bollegala, and Ichiro Sakata. 2020. Tree-Structured Neural Topic Model. In Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics, pages 800–806, Online. Association for Computational Linguistics.