@inproceedings{hayou2023width,
  author    = {Hayou, Soufiane and Yang, Greg},
  title     = {Width and Depth Limits Commute in Residual Networks},
  booktitle = {ICML 2023},
  year      = {2023},
  month     = {February},
  abstract  = {We show that taking the width and depth to infinity in a deep neural network with skip connections, when branches are scaled by 1/√depth (the only non-trivial scaling), results in the same covariance structure no matter how that limit is taken. This explains why the standard infinite-width-then-depth approach provides practical insights even for networks with depth of the same order as width. We also demonstrate that, in this limit, the pre-activations have Gaussian distributions, which has direct applications in Bayesian deep learning. We conduct extensive simulations that show an excellent match with our theoretical findings.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/width-and-depth-limits-commute-in-deep-residual-network/},
}
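
A minimal sketch (not the authors' code) of the residual scaling the abstract describes: each residual branch is multiplied by 1/√depth before being added to the skip connection. All function names, the ReLU/Gaussian-init choices, and the width/depth values are illustrative assumptions, not taken from the paper.

```python
import numpy as np


def residual_mlp_forward(x, depth, width, rng):
    """Toy residual MLP forward pass with 1/sqrt(depth) branch scaling."""
    h = x
    scale = 1.0 / np.sqrt(depth)  # the 1/sqrt(depth) branch scaling from the abstract
    for _ in range(depth):
        # Gaussian weights with 1/sqrt(width) variance scaling (an assumed init choice)
        W = rng.standard_normal((width, width)) / np.sqrt(width)
        # skip connection plus scaled nonlinear branch
        h = h + scale * np.maximum(W @ h, 0.0)
    return h


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    width, depth = 512, 512  # depth of the same order as width, as discussed in the abstract
    x = rng.standard_normal(width) / np.sqrt(width)
    out = residual_mlp_forward(x, depth, width, rng)
    print(out[:5])
```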