@inproceedings{gopi2022private,
  author    = {Gopi, Sivakanth and Lee, Yin Tat and Liu, Daogao},
  title     = {Private Convex Optimization via Exponential Mechanism},
  booktitle = {COLT 2022},
  year      = {2022},
  month     = {March},
  abstract  = {In this paper, we study private optimization problems for non-smooth convex functions $F(x)=\mathbb{E}_i f_i(x)$ on $\mathbb{R}^d$. We show that modifying the exponential mechanism by adding an $\ell_2^2$ regularizer to $F(x)$ and sampling from $\pi(x) \propto \exp(-k(F(x)+\mu\|x\|_2^2/2))$ recovers both the known optimal empirical risk and population loss under $(\epsilon,\delta)$-DP. Furthermore, we show how to implement this mechanism using $\widetilde{O}(n \min(d, n))$ queries to $f_i(x)$ for the DP-SCO, where $n$ is the number of samples/users and $d$ is the ambient dimension. We also give a (nearly) matching lower bound on the number of evaluation queries. Our results utilize the following tools that are of independent interest: (1) We prove Gaussian Differential Privacy (GDP) of the exponential mechanism if the loss function is strongly convex and the perturbation is Lipschitz. Our privacy bound is optimal, as it includes the privacy of the Gaussian mechanism as a special case, and is proved using the isoperimetric inequality for strongly log-concave measures. (2) We show how to sample from $\exp(-F(x)-\mu\|x\|_2^2/2)$ for $G$-Lipschitz $F$ with error $\alpha$ in total variation (TV) distance using $\widetilde{O}((G^2/\mu)\log^2(d/\alpha))$ unbiased queries to $F(x)$. This is the first sampler whose query complexity has polylogarithmic dependence on both the dimension $d$ and the accuracy $\alpha$.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/private-convex-optimization-via-exponential-mechanism/},
}
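
The sketch below is an illustrative companion to the abstract, not the paper's algorithm: it approximates a draw from the regularized exponential mechanism $\pi(x) \propto \exp(-k(F(x)+\mu\|x\|_2^2/2))$ using a generic unadjusted Langevin sampler on a toy hinge-loss objective. The function name, synthetic data, and the settings of k, mu, eta, and steps are assumptions chosen for demonstration only; they are not calibrated to give any differential-privacy guarantee, and the sampler uses gradient queries rather than the evaluation-only queries analyzed in the paper.

import numpy as np

def regularized_exponential_mechanism(grad_F, d, k, mu, steps=5000, eta=1e-3, seed=None):
    # Approximate sample from pi(x) proportional to exp(-k (F(x) + mu/2 ||x||^2))
    # via unadjusted Langevin dynamics (illustrative only, not the paper's sampler).
    rng = np.random.default_rng(seed)
    x = np.zeros(d)
    for _ in range(steps):
        g = k * (grad_F(x) + mu * x)                      # gradient of the negative log-density
        x = x - eta * g + np.sqrt(2.0 * eta) * rng.standard_normal(d)
    return x

# Toy usage with synthetic data (hypothetical): F is an average of hinge losses,
# hence convex and 1-Lipschitz once the rows of A are normalized to unit norm.
rng = np.random.default_rng(0)
A = rng.standard_normal((100, 5))                         # n = 100 samples, d = 5
A /= np.linalg.norm(A, axis=1, keepdims=True)
b = rng.choice([-1.0, 1.0], size=100)

def grad_F(x):
    active = (b * (A @ x) < 1.0).astype(float)            # hinge terms currently active
    return -(active * b) @ A / len(b)                     # average subgradient of F

x_priv = regularized_exponential_mechanism(grad_F, d=5, k=10.0, mu=0.1, seed=1)
print("approximate sample:", x_priv)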