@inproceedings{zlotchevski2022exploring,
  author    = {Zlotchevski, Andrei and Drain, Dawn and Svyatkovskiy, Alexey and Clement, Colin and Sundaresan, Neel and Tufano, Michele},
  title     = {Exploring and Evaluating Personalized Models for Code Generation},
  booktitle = {Proceedings of the {ACM} Joint European Software Engineering Conference and Symposium on the Foundations of Software Engineering ({ESEC/FSE})},
  year      = {2022},
  month     = aug,
  publisher = {ACM},
  abstract  = {Large Transformer models achieved the state-of-the-art status for Natural Language Understanding tasks and are increasingly becoming the baseline model architecture for modeling source code. Transformers are usually pre-trained on large unsupervised corpora, learning token representations and transformations relevant to modeling generally available text, and are then fine-tuned on a particular downstream task of interest. While fine-tuning is a tried-and-true method for adapting a model to a new domain -- for example, question-answering on a given topic -- generalization remains an on-going challenge. In this paper, we explore and evaluate transformer model fine-tuning for personalization. In the context of generating unit tests for Java methods, we evaluate learning to personalize to a specific software project using several personalization techniques. We consider three key approaches: (i) custom fine-tuning, which allows all the model parameters to be tuned; (ii) lightweight fine-tuning, which freezes most of the model's parameters, allowing tuning of the token embeddings and softmax layer only or the final layer alone; (iii) prefix tuning, which keeps model parameters frozen, but optimizes a small project-specific prefix vector. Each of these techniques offers a trade-off in total compute cost and predictive performance, which we evaluate by code and task-specific metrics, training time, and total computational operations. We compare these fine-tuning strategies for code generation and discuss the potential generalization and cost benefits of each in various deployment scenarios.},
  url       = {https://www.microsoft.com/en-us/research/publication/exploring-and-evaluating-personalized-models-for-code-generation/},
}