@unpublished{xu2020mc-bert,
  author   = {Xu, Zhenhui and Gong, Linyuan and Ke, Guolin and He, Di and Zheng, Shuxin and Wang, Liwei and Bian, Jiang and Liu, Tie-Yan},
  title    = {MC-BERT: Efficient Language Pre-Training via a Meta Controller},
  year     = {2020},
  month    = {June},
  abstract = {Pre-trained contextual representations (e.g., BERT) have become the foundation for achieving state-of-the-art results on many NLP tasks. However, large-scale pre-training is computationally expensive. ELECTRA, an early attempt to accelerate pre-training, trains a discriminative model that predicts whether each input token was replaced by a generator. Our studies reveal that ELECTRA's success is mainly due to the reduced complexity of its pre-training task: the binary classification (replaced token detection) is more efficient to learn than the generation task (masked language modeling). However, such a simplified task is less semantically informative. To achieve better efficiency and effectiveness, we propose a novel meta-learning framework, MC-BERT. The pre-training task is a multi-choice cloze test with a reject option, where a meta controller network provides the training input and candidates. Results on the GLUE natural language understanding benchmark demonstrate that our proposed method is both efficient and effective: it outperforms baselines on GLUE semantic tasks given the same computational budget.},
  url      = {http://approjects.co.za/?big=en-us/research/publication/mc-bert-efficient-language-pre-training-via-a-meta-controller/},
}