@inproceedings{jiang2021towards,
  author    = {Jiang, Jiawei and Gan, Shaoduo and Liu, Yue and Wang, Fanlin and Alonso, Gustavo and Klimovic, Ana and Singla, Ankit and Wu, Wentao and Zhang, Ce},
  title     = {Towards Demystifying Serverless Machine Learning Training},
  booktitle = {ACM SIGMOD International Conference on Management of Data (SIGMOD 2021)},
  year      = {2021},
  month     = {June},
  abstract  = {The appeal of serverless computing (FaaS) has triggered a growing interest in how to use it for data-intensive applications such as ETL, query processing, and machine learning (ML). Several systems exist for training large-scale ML models on top of serverless infrastructures (e.g., AWS Lambda), but with inconclusive results regarding their performance and relative advantage over ``serverful'' infrastructures (IaaS). In this paper, we present a systematic, comparative study of distributed ML training over FaaS and IaaS. We present a design space covering design choices such as optimization algorithms and synchronization protocols, and implement a platform, LambdaML, that enables a fair comparison between FaaS and IaaS. We present experimental results using LambdaML and further develop an analytic model to capture the cost/performance tradeoffs that must be considered when opting for a serverless infrastructure. Our results indicate that ML training pays off in serverless only for models with efficient (i.e., reduced) communication that converge quickly. In general, FaaS can be much faster, but it is never significantly cheaper than IaaS.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/towards-demystifying-serverless-machine-learning-training/},
}