@inproceedings{bishop1999variational,
  author    = {Bishop, Christopher},
  title     = {Variational Principal Components},
  booktitle = {Proceedings Ninth International Conference on Artificial Neural Networks, ICANN'99},
  year      = {1999},
  month     = {January},
  volume    = {1},
  pages     = {509--514},
  publisher = {IEE},
  url       = {http://approjects.co.za/?big=en-us/research/publication/variational-principal-components/},
  abstract  = {One of the central issues in the use of principal component analysis (PCA) for data modelling is that of choosing the appropriate number of retained components. This problem was recently addressed through the formulation of a Bayesian treatment of PCA (Bishop, 1998) in terms of a probabilistic latent variable model. A central feature of this approach is that the effective dimensionality of the latent space (equivalent to the number of retained principal components) is determined automatically as part of the Bayesian inference procedure. In common with most non-trivial Bayesian models, however, the required marginalizations are analytically intractable, and so an approximation scheme based on a local Gaussian representation of the posterior distribution was employed. In this paper we develop an alternative, variational formulation of Bayesian PCA, based on a factorial representation of the posterior distribution. This approach is computationally efficient, and unlike other approximation schemes, it maximizes a rigorous lower bound on the marginal log probability of the observed data.},
}
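% Note: the "rigorous lower bound" mentioned in the abstract is the standard
% variational lower bound on the log marginal likelihood, maximized over a
% factorized approximating posterior Q(theta). A generic sketch of that bound
% (notation here is assumed, not copied from the paper):
%
%   \ln p(D) \ge \mathcal{L}(Q) = \int Q(\theta) \ln \frac{p(D, \theta)}{Q(\theta)} \, d\theta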