@inproceedings{chi2021xlm-e,
  author    = {Chi, Zewen and Huang, Shaohan and Dong, Li and Ma, Shuming and Singhal, Saksham and Bajaj, Payal and Song, Xia and Wei, Furu},
  title     = {{XLM-E}: Cross-lingual Language Model Pre-training via {ELECTRA}},
  booktitle = {ACL 2022},
  year      = {2022},
  month     = {May},
  abstract  = {In this paper, we introduce ELECTRA-style tasks to cross-lingual language model pre-training. Specifically, we present two pre-training tasks, namely multilingual replaced token detection and translation replaced token detection. In addition, we pre-train the model, named XLM-E, on both multilingual and parallel corpora. Our model outperforms the baseline models on various cross-lingual understanding tasks at a much lower computation cost. Moreover, analysis shows that XLM-E tends to obtain better cross-lingual transferability.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/xlm-e-cross-lingual-language-model-pre-training-via-electra/},
}