@inproceedings{kim2016frustratingly,
  author    = {Kim, Young-Bum and Stratos, Karl and Sarikaya, Ruhi},
  title     = {Frustratingly Easy Neural Domain Adaptation},
  booktitle = {COLING},
  year      = {2016},
  month     = {December},
  abstract  = {Popular techniques for domain adaptation such as the feature augmentation method of Daumé III (2009) have mostly been considered for sparse binary-valued features, but not for dense real-valued features such as those used in neural networks. In this paper, we describe simple neural extensions of these techniques. First, we propose a natural generalization of the feature augmentation method that uses K + 1 LSTMs where one model captures global patterns across all K domains and the remaining K models capture domain-specific information. Second, we propose a novel application of the framework for learning shared structures by Ando and Zhang (2005) to domain adaptation, and also provide a neural extension of their approach. In experiments on slot tagging over 17 domains, our methods give clear performance improvement over Daumé III (2009) applied on feature-rich CRFs.},
  publisher = {Association for Computational Linguistics},
  url       = {http://approjects.co.za/?big=en-us/research/publication/frustratingly-easy-neural-domain-adaptation/},
}
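% The abstract above describes a K+1 LSTM variant of feature augmentation:
% one shared LSTM runs on every input while each of the K domains has its
% own LSTM, and their per-token outputs are concatenated before tagging.
% Below is a minimal, hypothetical PyTorch sketch of that idea -- it is not
% the authors' code; all names, dimensions, and the linear tagger head are
% illustrative assumptions.

import torch
import torch.nn as nn

class AugmentedLSTMTagger(nn.Module):
    def __init__(self, vocab_size, emb_dim, hidden_dim, num_domains, num_tags):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, emb_dim)
        # Shared LSTM: the "+1" model, run on inputs from every domain.
        self.shared = nn.LSTM(emb_dim, hidden_dim, batch_first=True)
        # One domain-specific LSTM per domain (the K models).
        self.domain = nn.ModuleList(
            nn.LSTM(emb_dim, hidden_dim, batch_first=True)
            for _ in range(num_domains)
        )
        # Slot tagger over the concatenated [shared; domain] features
        # (an assumed linear head for illustration).
        self.out = nn.Linear(2 * hidden_dim, num_tags)

    def forward(self, tokens, domain_id):
        x = self.embed(tokens)                       # (B, T, emb_dim)
        h_shared, _ = self.shared(x)                 # (B, T, hidden_dim)
        h_domain, _ = self.domain[domain_id](x)      # (B, T, hidden_dim)
        h = torch.cat([h_shared, h_domain], dim=-1)  # (B, T, 2*hidden_dim)
        return self.out(h)                           # (B, T, num_tags)

# Usage: tag a batch of 8 sentences of length 12 from domain 3 of 17.
model = AugmentedLSTMTagger(vocab_size=10000, emb_dim=100,
                            hidden_dim=128, num_domains=17, num_tags=20)
logits = model(torch.randint(0, 10000, (8, 12)), domain_id=3)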