@inproceedings{lin2015a,
  author    = {Lin, Hongzhou and Mairal, Julien and Harchaoui, Zaid},
  title     = {A Universal Catalyst for First-Order Optimization},
  booktitle = {Neural Information Processing Systems},
  year      = {2015},
  month     = {December},
  abstract  = {We introduce a generic scheme for accelerating first-order optimization methods in the sense of Nesterov, which builds upon a new analysis of the accelerated proximal point algorithm. Our approach consists of minimizing a convex objective by approximately solving a sequence of well-chosen auxiliary problems, leading to faster convergence. This strategy applies to a large class of algorithms, including gradient descent, block coordinate descent, SAG, SAGA, SDCA, SVRG, Finito/MISO, and their proximal variants. For all of these methods, we provide acceleration and explicit support for non-strongly convex objectives. In addition to theoretical speed-up, we also show that acceleration is useful in practice, especially for ill-conditioned problems where we measure significant improvements.},
  publisher = {MIT Press},
  url       = {https://www.microsoft.com/en-us/research/publication/a-universal-catalyst-for-first-order-optimization/},
  pages     = {3384--3392},
}
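% The scheme summarized in the abstract is an accelerated proximal point outer
% loop: at iteration k, the inner method M approximately minimizes a
% kappa-regularized auxiliary problem around an extrapolated point, then the
% extrapolation point is updated. Below is a minimal LaTeX sketch of one
% standard Nesterov-style form of this loop; the exact parameter sequences and
% inner stopping criteria are specified in the paper, and the choices of q and
% beta_k shown here assume a mu-strongly convex objective F.
%
% \begin{align*}
%   x_k &\approx \operatorname*{arg\,min}_x \Big\{ F(x) + \tfrac{\kappa}{2}\,\lVert x - y_{k-1}\rVert^2 \Big\}, \\
%   \alpha_k^2 &= (1-\alpha_k)\,\alpha_{k-1}^2 + q\,\alpha_k, \qquad q = \frac{\mu}{\mu+\kappa}, \\
%   y_k &= x_k + \beta_k\,(x_k - x_{k-1}), \qquad \beta_k = \frac{\alpha_{k-1}(1-\alpha_{k-1})}{\alpha_{k-1}^2 + \alpha_k}.
% \end{align*}
%
% The intuition: each auxiliary problem is better conditioned than F itself
% (its condition number is controlled by kappa), which is what lets a
% non-accelerated inner method M inherit an accelerated outer rate.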