@inproceedings{duan2018attention-fused,
  author    = {Duan, Chaoqun and Cui, Lei and Chen, Xinchi and Wei, Furu and Zhu, Conghui and Zhao, Tiejun},
  title     = {Attention-Fused Deep Matching Network for Natural Language Inference},
  booktitle = {Proceedings of the Twenty-Seventh International Joint Conference on Artificial Intelligence (IJCAI 2018)},
  year      = {2018},
  month     = {July},
  abstract  = {Natural language inference aims to predict whether a premise sentence entails a hypothesis sentence. Recent progress on this task relies only on shallow interaction between sentence pairs, which is insufficient for modeling complex relations. In this paper, we present an attention-fused deep matching network (AF-DMN) for natural language inference. Unlike existing models, AF-DMN takes two sentences as input and iteratively learns attention-aware representations for each side through multi-level interactions. Moreover, we add a self-attention mechanism to fully exploit local context information within each sentence. Experimental results show that AF-DMN achieves state-of-the-art performance and outperforms strong baselines on the Stanford Natural Language Inference (SNLI), Multi-Genre Natural Language Inference (MultiNLI), and Quora duplicate questions datasets.},
  url       = {https://www.microsoft.com/en-us/research/publication/attention-fused-deep-matching-network-natural-language-inference/},
}
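
For readers who want a concrete picture of the architecture the abstract describes, below is a minimal, generic PyTorch sketch of a sentence-pair matcher that stacks cross-attention and self-attention interaction blocks. It is not the authors' AF-DMN implementation; the fusion layers, pooling, block count, dimensions, and class names (MatchingBlock, PairMatcher) are illustrative assumptions only.

# Minimal sketch of cross-attention + self-attention matching blocks for NLI.
# NOT the authors' AF-DMN code; all layer shapes and the fusion scheme are
# assumptions made for illustration.
import torch
import torch.nn as nn
import torch.nn.functional as F

class MatchingBlock(nn.Module):
    """One interaction level: cross-attention between the two sentences,
    then self-attention within each sentence."""
    def __init__(self, dim):
        super().__init__()
        self.fuse_cross = nn.Linear(2 * dim, dim)
        self.fuse_self = nn.Linear(2 * dim, dim)

    @staticmethod
    def attend(query, context):
        # Soft alignment: each query position attends over all context positions.
        scores = torch.matmul(query, context.transpose(1, 2))   # (B, Lq, Lc)
        weights = F.softmax(scores, dim=-1)
        return torch.matmul(weights, context)                   # (B, Lq, D)

    def forward(self, a, b):
        # Cross-attention: attend each sentence to the other, then fuse.
        a_c = torch.relu(self.fuse_cross(torch.cat([a, self.attend(a, b)], dim=-1)))
        b_c = torch.relu(self.fuse_cross(torch.cat([b, self.attend(b, a)], dim=-1)))
        # Self-attention: exploit local context within each sentence.
        a_out = torch.relu(self.fuse_self(torch.cat([a_c, self.attend(a_c, a_c)], dim=-1)))
        b_out = torch.relu(self.fuse_self(torch.cat([b_c, self.attend(b_c, b_c)], dim=-1)))
        return a_out, b_out

class PairMatcher(nn.Module):
    """Stacks several matching blocks, pools, and classifies the pair
    (3 classes assumed: entailment / contradiction / neutral)."""
    def __init__(self, vocab_size, dim=128, num_blocks=3, num_classes=3):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, dim)
        self.blocks = nn.ModuleList(MatchingBlock(dim) for _ in range(num_blocks))
        self.classifier = nn.Linear(4 * dim, num_classes)

    def forward(self, premise_ids, hypothesis_ids):
        a, b = self.embed(premise_ids), self.embed(hypothesis_ids)
        for block in self.blocks:                      # iterative multi-level interaction
            a, b = block(a, b)
        a_vec, b_vec = a.max(dim=1).values, b.max(dim=1).values   # max pooling over tokens
        features = torch.cat([a_vec, b_vec, a_vec - b_vec, a_vec * b_vec], dim=-1)
        return self.classifier(features)

# Usage example with toy token ids: a batch of 2 premise/hypothesis pairs.
model = PairMatcher(vocab_size=1000)
logits = model(torch.randint(0, 1000, (2, 7)), torch.randint(0, 1000, (2, 5)))
print(logits.shape)  # torch.Size([2, 3])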