@techreport{thiesson1997learning,
  author      = {Thiesson, Bo and Meek, Chris and Chickering, Max and Heckerman, David},
  title       = {Learning Mixtures of {DAG} Models},
  institution = {Microsoft Research},
  year        = {1997},
  month       = dec,
  number      = {MSR-TR-97-30},
  pages       = {24},
  publisher   = {Morgan Kaufmann Publishers},
  abstract    = {We describe computationally efficient methods for Bayesian model selection. The methods select among mixtures in which each mixture component is a directed acyclic graphical model (mixtures of DAGs or MDAGs), and can be applied to incomplete data sets. The model-selection criterion that we consider is the posterior probability of the model (structure) given data. Our model-selection problem is difficult because (1) the number of possible model structures grows super-exponentially with the number of random variables and (2) missing data necessitates the use of computationally slow approximations of model posterior probability. We argue that simple search-and-score algorithms are infeasible for a variety of problems, and introduce a feasible approach in which parameter and structure search is interleaved and expected data is treated as real data. Our approach can be viewed as the combinations of (1) a modified Cheeseman-Stutz asymptotic approximation for model posterior probability and (2) the Expectation-Maximization algorithm. We evaluate our procedure for selecting among MDAGs on synthetic and real examples.},
  url         = {https://www.microsoft.com/en-us/research/publication/learning-mixtures-of-dag-models/},
  note        = {Revised May 1998},
}