@article{arjevani2022lower,
  author   = {Arjevani, Yossi and Carmon, Yair and Duchi, John C. and Foster, Dylan and Srebro, Nathan and Woodworth, Blake E.},
  title    = {Lower Bounds for Non-Convex Stochastic Optimization},
  journal  = {Mathematical Programming},
  year     = {2022},
  month    = {May},
  url      = {https://www.microsoft.com/en-us/research/publication/lower-bounds-for-non-convex-stochastic-optimization/},
  abstract = {We lower bound the complexity of finding $\epsilon$-stationary points (with gradient norm at most $\epsilon$) using stochastic first-order methods. In a well-studied model where algorithms access smooth, potentially non-convex functions through queries to an unbiased stochastic gradient oracle with bounded variance, we prove that (in the worst case) any algorithm requires at least $\epsilon^{-4}$ queries to find an $\epsilon$-stationary point. The lower bound is tight, and establishes that stochastic gradient descent is minimax optimal in this model. In a more restrictive model where the noisy gradient estimates satisfy a mean-squared smoothness property, we prove a lower bound of $\epsilon^{-3}$ queries, establishing the optimality of recently proposed variance reduction techniques.},
}