@techreport{minka2005divergence,
  author      = {Minka, Tom},
  title       = {Divergence Measures and Message Passing},
  institution = {Microsoft Research},
  number      = {MSR-TR-2005-173},
  year        = {2005},
  month       = {January},
  pages       = {17},
  url         = {https://www.microsoft.com/en-us/research/publication/divergence-measures-and-message-passing/},
  abstract    = {This paper presents a unifying view of message-passing algorithms, as methods to approximate a complex Bayesian network by a simpler network with minimum information divergence. In this view, the difference between mean-field methods and belief propagation is not the amount of structure they model, but only the measure of loss they minimize (`exclusive' versus `inclusive' Kullback-Leibler divergence). In each case, message-passing arises by minimizing a localized version of the divergence, local to each factor. By examining these divergence measures, we can intuit the types of solution they prefer (symmetry-breaking, for example) and their suitability for different tasks. Furthermore, by considering a wider variety of divergence measures (such as alpha-divergences), we can achieve different complexity and performance goals.},
}