@inproceedings{santy2021bertologicomix,
  author    = {Santy, Sebastin and Srinivasan, Anirudh and Choudhury, Monojit},
  title     = {BERTologiCoMix: How does Code-Mixing interact with Multilingual BERT?},
  booktitle = {AdaptNLP EACL 2021},
  year      = {2021},
  month     = {April},
  abstract  = {Models such as mBERT and XLMR have shown success in solving code-mixed NLP tasks even though they were not exposed to such text during pretraining. Code-mixed NLP models have relied on synthetically generated data along with naturally occurring data to improve their performance. Finetuning mBERT on such data improves its code-mixed performance, but the benefits of using the different types of code-mixed data are not clear. In this paper, we study the impact of finetuning with different types of code-mixed data and outline the changes that occur to the model during such finetuning. Our findings suggest that finetuning on naturally occurring code-mixed data brings the best performance improvement, and that finetuning with any type of code-mixed text improves the responsivity of the model's attention heads to code-mixed text inputs.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/bertologicomix-how-does-code-mixing-interact-with-multilingual-bert/},
}