@inproceedings{gopinath2019compiling,
  author       = {Gopinath, Sridhar and Ghanathe, Nikhil and Seshadri, Vivek and Sharma, Rahul},
  title        = {Compiling {KB}-Sized Machine Learning Models to Tiny {IoT} Devices},
  booktitle    = {Programming Language Design and Implementation (PLDI)},
  organization = {ACM},
  year         = {2019},
  month        = {June},
  abstract     = {Recent advances in machine learning (ML) have produced KiloByte-size models that can run directly on constrained IoT devices. This approach avoids expensive communication between IoT devices and the cloud, thereby enabling energy-efficient real-time analytics. However, ML models are typically expressed in floating-point, and IoT hardware typically does not support floating-point. Therefore, running these models on IoT devices requires simulating IEEE-754 floating-point in software, which is very inefficient. We present SeeDot, a domain-specific language to express ML inference algorithms, and a compiler that compiles SeeDot programs to fixed-point code that can run efficiently on constrained IoT devices. We propose 1) a novel compilation strategy that reduces the search space for some key parameters used in the fixed-point code, and 2) new, efficient implementations of expensive operations. SeeDot compiles state-of-the-art KB-sized models to various microcontrollers and low-end FPGAs. We show that SeeDot outperforms 1) software emulation of floating-point (Arduino), 2) high-bitwidth fixed-point (MATLAB), 3) post-training quantization (TensorFlow-Lite), and 4) floating- and fixed-point FPGA implementations generated using high-level synthesis tools.},
  url          = {https://www.microsoft.com/en-us/research/publication/compiling-kb-sized-machine-learning-models-to-constrained-hardware/},
}
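
For readers unfamiliar with fixed-point inference, the sketch below (plain C, not drawn from the paper) illustrates the basic idea the abstract refers to: instead of emulating IEEE-754 floating-point in software, values are scaled to 16-bit integers with a chosen number of fractional bits, and arithmetic such as a dot product runs entirely on integers. The SCALE constant, helper names, and rescaling scheme here are illustrative assumptions; SeeDot's actual scale selection and emitted code are more sophisticated.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative sketch only: a fixed-point dot product in the style of
     * code one might emit for a microcontroller without an FPU.
     * SCALE (number of fractional bits) is a hypothetical, hand-chosen
     * parameter; choosing such scales automatically is part of what a
     * compiler like SeeDot addresses. */

    #define SCALE 12  /* interpret int16_t x as the real value x / 2^SCALE */

    static int16_t to_fixed(float x)  { return (int16_t)(x * (1 << SCALE)); }
    static float   to_float(int16_t x){ return (float)x / (1 << SCALE); }

    /* Dot product of two fixed-point vectors. The 32-bit accumulator holds
     * products at scale 2*SCALE; one division rescales the result back. */
    static int16_t dot_fixed(const int16_t *a, const int16_t *b, int n) {
        int32_t acc = 0;
        for (int i = 0; i < n; i++) {
            acc += (int32_t)a[i] * (int32_t)b[i];
        }
        return (int16_t)(acc / (1 << SCALE));
    }

    int main(void) {
        float av[3] = {0.5f, -1.25f, 2.0f};
        float bv[3] = {1.5f,  0.75f, -0.5f};
        int16_t af[3], bf[3];
        for (int i = 0; i < 3; i++) { af[i] = to_fixed(av[i]); bf[i] = to_fixed(bv[i]); }
        /* reference result in floating-point: 0.5*1.5 - 1.25*0.75 - 2.0*0.5 = -1.1875 */
        printf("fixed-point dot = %f\n", to_float(dot_fixed(af, bf, 3)));
        return 0;
    }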