@inproceedings{gu2018universal,
  author    = {Gu, Jiatao and Hassan Awadalla, Hany and Devlin, Jacob},
  title     = {Universal Neural Machine Translation for Extremely Low Resource Languages},
  booktitle = {{NAACL}},
  year      = {2018},
  month     = jun,
  abstract  = {UniNMT In this paper, we propose a new universal machine translation approach focusing on languages with a limited amount of parallel data. Our proposed approach utilizes a transfer-learning approach to share lexical and sentence level representations across multiple source languages into one target language. The lexical part is shared through a Universal Lexical Representation to support multi-lingual word-level sharing. The sentence-level sharing is represented by a model of experts from all source languages that share the source encoders with all other languages. This enables the low-resource language to utilize the lexical and sentence representations of the higher resource languages. Our approach is able to achieve 23 BLEU on Romanian-English WMT2016 using a tiny parallel corpus of 6k sentences, compared to the 18 BLEU of strong baseline system which uses multi-lingual training and back-translation. Furthermore, we show that the proposed approach can achieve almost 20 BLEU on the same dataset through fine-tuning a pre-trained multi-lingual system in a zero-shot setting.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/universal-neural-machine-translation-extremely-low-resource-languages/},
}