@inproceedings{trischler2016a,
  author    = {Trischler, Adam and Ye, Zheng and Yuan, Xingdi and He, Jing and Bachman, Philip and Suleman, Kaheer},
  title     = {A Parallel-Hierarchical Model for Machine Comprehension on Sparse Data},
  booktitle = {2016 Annual Meeting of the Association for Computational Linguistics (ACL)},
  year      = {2016},
  month     = {August},
  abstract  = {Understanding unstructured text is a major goal within natural language processing. Comprehension tests pose questions based on short text passages to evaluate such understanding. In this work, we investigate machine comprehension on the challenging MCTest benchmark. Partly because of its limited size, prior work on MCTest has focused mainly on engineering better features. We tackle the dataset with a neural approach, harnessing simple neural networks arranged in a parallel hierarchy. The parallel hierarchy enables our model to compare the passage, question, and answer from a variety of trainable perspectives, as opposed to using a manually designed, rigid feature set. Perspectives range from the word level to sentence fragments to sequences of sentences; the networks operate only on word-embedding representations of text. When trained with a methodology designed to help cope with limited training data, our Parallel-Hierarchical model sets a new state of the art for MCTest, outperforming previous feature-engineered approaches slightly and previous neural approaches by a significant margin (over 15% absolute).},
  url       = {https://www.microsoft.com/en-us/research/publication/a-parallel-hierarchical-model-for-machine-comprehension-on-sparse-data/},
}