@inproceedings{cui2013multi-domain,
  author    = {Cui, Lei and Chen, Xilun and Zhang, Dongdong and Liu, Shujie and Li, Mu and Zhou, Ming},
  title     = {Multi-Domain Adaptation for {SMT} Using {Multi-Task Learning}},
  booktitle = {{EMNLP} 2013},
  year      = {2013},
  month     = oct,
  abstract  = {Domain adaptation for SMT usually adapts models to an individual specific domain. However, it often lacks some correlation among different domains where common knowledge could be shared to improve the overall translation quality. In this paper, we propose a novel multi-domain adaptation approach for SMT using Multi-Task Learning (MTL), with in-domain models tailored for each specific domain and a general-domain model shared by different domains. The parameters of these models are tuned jointly via MTL so that they can learn general knowledge more accurately and exploit domain knowledge better. Our experiments on a large-scale English-to-Chinese translation task validate that the MTL-based adaptation approach significantly and consistently improves the translation quality compared to a non-adapted baseline. Furthermore, it also outperforms the individual adaptation of each specific domain.},
  publisher = {EMNLP},
  url       = {http://approjects.co.za/?big=en-us/research/publication/multi-domain-adaptation-for-smt-using-multi-task-learning/},
}