@inproceedings{yu2021windtunnel,
  author    = {Yu, Gyeong-in and Amizadeh, Saeed and Kim, Sehoon and Pagnoni, Artidoro and Zhang, Ce and Chun, Byung-Gon and Weimer, Markus and Interlandi, Matteo},
  title     = {WindTunnel: Towards Differentiable ML Pipelines Beyond a Single Model},
  booktitle = {VLDB 2022},
  year      = {2021},
  month     = {September},
  abstract  = {While deep neural networks (DNNs) have been shown to be successful in several domains, such as computer vision, non-DNN models such as linear models and gradient boosting trees are still considered state-of-the-art over tabular data. When using these models, data scientists often author machine learning (ML) pipelines: DAGs of ML operators comprising data transforms and ML models, where each operator is trained sequentially, one at a time. Conversely, when training DNNs, the layers composing the neural network are trained simultaneously using backpropagation. In this paper, we argue that the training scheme of ML pipelines is sub-optimal because it optimizes one operator at a time and thus forgoes the opportunity for global optimization. We therefore propose WindTunnel: a system that translates a trained ML pipeline into a pipeline of neural network modules and jointly optimizes the modules using backpropagation. We also present translation methodologies for several non-differentiable operators, such as gradient boosting trees and categorical feature encoders. Our experiments show that fine-tuning the translated WindTunnel pipelines is a promising technique for increasing final accuracy.},
  url       = {https://www.microsoft.com/en-us/research/publication/windtunnel-towards-differentiable-ml-pipelines-beyond-a-single-model/},
}