@inproceedings{norouzi2015efficient,
  author    = {Norouzi, Mohammad and Collins, Maxwell D. and Johnson, Matthew and Fleet, David J. and Kohli, Pushmeet},
  title     = {Efficient Non-greedy Optimization of Decision Trees},
  booktitle = {NIPS'15: Proceedings of the 28th International Conference on Neural Information Processing Systems},
  year      = {2015},
  month     = {December},
  pages     = {1729--1737},
  publisher = {MIT Press},
  address   = {Cambridge, MA, USA},
  url       = {http://approjects.co.za/?big=en-us/research/publication/efficient-non-greedy-optimization-decision-trees/},
  abstract  = {Decision trees and randomized forests are widely used in computer vision and machine learning. Standard algorithms for decision tree induction optimize the split functions one node at a time according to some splitting criterion. This greedy procedure often leads to suboptimal trees. In this paper, we present an algorithm for optimizing the split functions at all levels of the tree jointly with the leaf parameters, based on a global objective. We show that the problem of finding optimal linear-combination (oblique) splits for decision trees is related to structured prediction with latent variables, and we formulate a convex-concave upper bound on the tree's empirical loss. Computing the gradient of the proposed surrogate objective with respect to each training exemplar is $O(d^2)$, where $d$ is the tree depth, and thus training deep trees is feasible. The use of stochastic gradient descent for optimization enables effective training with large datasets. Experiments on several classification benchmarks demonstrate that the resulting non-greedy decision trees outperform greedy decision tree baselines.},
}