@misc{sharaf2020meta-learning,
  author       = {Sharaf, Amr and Hassan Awadalla, Hany and Daumé III, Hal},
  title        = {Meta-Learning for Few-Shot {NMT} Adaptation},
  howpublished = {arXiv preprint},
  year         = {2020},
  month        = {April},
  abstract     = {We present META-MT, a meta-learning approach to adapt Neural Machine Translation (NMT) systems in a few-shot setting. META-MT provides a new approach to make NMT models easily adaptable to many target domains with a minimal amount of in-domain data. We frame the adaptation of NMT systems as a meta-learning problem, where we learn to adapt to new unseen domains based on simulated offline meta-training domain adaptation tasks. We evaluate the proposed meta-learning strategy on ten domains with general large-scale NMT systems. We show that META-MT significantly outperforms classical domain adaptation when very few in-domain examples are available. Our experiments show that META-MT can outperform classical fine-tuning by up to 2.5 BLEU points after seeing only 4,000 translated words (300 parallel sentences).},
  url          = {https://www.microsoft.com/en-us/research/publication/meta-learning-for-few-shot-nmt-adaptation/},
}