@inproceedings{zheng2021consistency,
  author    = {Zheng, Bo and Dong, Li and Huang, Shaohan and Wang, Wenhui and Chi, Zewen and Singhal, Saksham and Che, Wanxiang and Liu, Ting and Song, Xia and Wei, Furu},
  title     = {Consistency Regularization for Cross-Lingual Fine-Tuning},
  booktitle = {ACL-IJCNLP 2021},
  year      = {2021},
  month     = {June},
  abstract  = {Fine-tuning pre-trained cross-lingual language models can transfer task-specific supervision from one language to the others. In this work, we propose to improve cross-lingual fine-tuning with consistency regularization. Specifically, we use example consistency regularization to penalize the prediction sensitivity to four types of data augmentations, i.e., subword sampling, Gaussian noise, code-switch substitution, and machine translation. In addition, we employ model consistency to regularize the models trained with two augmented versions of the same training set. Experimental results on the XTREME benchmark show that our method significantly improves cross-lingual fine-tuning across various tasks, including text classification, question answering, and sequence labeling.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/consistency-regularization-for-cross-lingual-fine-tuning/},
}
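
The abstract describes the method only at a high level. Below is a minimal PyTorch sketch of the example-consistency idea (penalizing prediction sensitivity to an augmented view of the same training batch). The symmetric-KL loss, the augment hook, the alpha weight, and all function names here are illustrative assumptions, not the paper's exact formulation.

import torch
import torch.nn.functional as F

def example_consistency_loss(logits_orig, logits_aug):
    # Symmetric KL divergence between the predictions on an original
    # batch and on an augmented view of the same batch. This is a
    # common form of consistency regularization; the paper's exact
    # loss may differ.
    log_p = F.log_softmax(logits_orig, dim=-1)
    log_q = F.log_softmax(logits_aug, dim=-1)
    # F.kl_div(input, target) expects log-probabilities as `input`
    # and probabilities as `target`, and computes KL(target || input).
    kl_pq = F.kl_div(log_q, log_p.exp(), reduction="batchmean")  # KL(p || q)
    kl_qp = F.kl_div(log_p, log_q.exp(), reduction="batchmean")  # KL(q || p)
    return 0.5 * (kl_pq + kl_qp)

def training_step(model, batch, labels, augment, alpha=1.0):
    # Hypothetical usage: `model`, `batch`, and `augment` (e.g. subword
    # sampling or code-switch substitution from the abstract's list of
    # augmentations) are assumed placeholders, not the paper's API.
    logits = model(batch)
    logits_aug = model(augment(batch))
    ce = F.cross_entropy(logits, labels)
    consistency = example_consistency_loss(logits, logits_aug)
    # Supervised loss plus weighted consistency penalty.
    return ce + alpha * consistency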