@inproceedings{a9fd7a90919c43d89995061ca3896b84,
title = "Shared components topic models",
abstract = "With a few exceptions, extensions to latent Dirichlet allocation (LDA) have focused on the distribution over topics for each document. Much less attention has been given to the underlying structure of the topics themselves. As a result, most topic models generate topics independently from a single underlying distribution and require millions of parameters, in the form of multinomial distributions over the vocabulary. In this paper, we introduce the Shared Components Topic Model (SCTM), in which each topic is a normalized product of a smaller number of underlying component distributions. Our model learns these component distributions and the structure of how to combine subsets of them into topics. The SCTM can represent topics in a much more compact representation than LDA and achieves better perplexity with fewer parameters.",
author = "Gormley, {Matthew R.} and Mark Dredze and {Van Durme}, Benjamin and Jason Eisner",
note = "Publisher Copyright: {\textcopyright} 2012 Association for Computational Linguistics.; 2012 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, NAACL HLT 2012 ; Conference date: 03-06-2012 Through 08-06-2012",
year = "2012",
language = "English (US)",
series = "NAACL HLT 2012 - 2012 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies, Proceedings of the Conference",
publisher = "Association for Computational Linguistics (ACL)",
pages = "783--792",
booktitle = "Proceedings of the 2012 Conference of the North American Chapter of the Association for Computational Linguistics",
address = "United States",
}
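% A minimal sketch of the "normalized product" topic construction described in
% the abstract above. The notation is illustrative, not drawn from the paper:
% phi_k is topic k, beta_c is underlying component distribution c, S_k is the
% learned subset of components combined into topic k, and V is the vocabulary.
%
%   \[ \phi_k(w) = \frac{\prod_{c \in S_k} \beta_c(w)}{\sum_{w' \in V} \prod_{c \in S_k} \beta_c(w')} \]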