@inproceedings{yang2016hierarchical,
  author    = {Yang, Zichao and Yang, Diyi and Dyer, Chris and He, Xiaodong and Smola, Alex and Hovy, Eduard},
  title     = {Hierarchical Attention Networks for Document Classification},
  booktitle = {NAACL 2016},
  year      = {2016},
  month     = {June},
  pages     = {1480--1489},
  edition   = {NAACL 2016},
  abstract  = {We propose a hierarchical attention network for document classification. Our model has two distinctive characteristics: (i) it has a hierarchical structure that mirrors the hierarchical structure of documents; (ii) it has two levels of attention mechanisms applied at the word and sentence level, enabling it to attend differentially to more and less important content when constructing the document representation. Experiments conducted on six large-scale text classification tasks demonstrate that the proposed architecture outperforms previous methods by a substantial margin. Visualization of the attention layers illustrates that the model selects qualitatively informative words and sentences.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/hierarchical-attention-networks-document-classification/},
}
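The abstract describes a two-level attention architecture: word-level attention pools word representations into sentence vectors, and sentence-level attention pools those into a single document vector. Below is a minimal sketch of that pooling step only, not the authors' code: the GRU encoders from the paper are omitted, the random word vectors stand in for encoder hidden states, and all parameter names (attention_pool, W_w, u_w, W_s, u_s) are illustrative assumptions.

import numpy as np

rng = np.random.default_rng(0)

def attention_pool(h, W, b, u):
    """Attend over the rows of h (shape [n, d]) and return their weighted sum."""
    scores = np.tanh(h @ W + b) @ u           # importance score for each row
    weights = np.exp(scores - scores.max())
    weights /= weights.sum()                  # softmax over rows
    return weights @ h                        # pooled vector of shape [d]

d = 8                                         # hidden size (illustrative)
doc = [rng.normal(size=(5, d)),               # sentence 1: 5 word vectors
       rng.normal(size=(7, d))]               # sentence 2: 7 word vectors

# Word-level attention: one shared context vector u_w scores words in each sentence.
W_w, b_w, u_w = rng.normal(size=(d, d)), np.zeros(d), rng.normal(size=d)
sent_vecs = np.stack([attention_pool(s, W_w, b_w, u_w) for s in doc])

# Sentence-level attention: a second context vector u_s pools sentences into the document vector.
W_s, b_s, u_s = rng.normal(size=(d, d)), np.zeros(d), rng.normal(size=d)
doc_vec = attention_pool(sent_vecs, W_s, b_s, u_s)
print(doc_vec.shape)                          # (8,); the paper feeds this to a softmax classifier

In the paper this document vector is produced from bidirectional GRU hidden states rather than raw word vectors, but the attention arithmetic is the same at both levels.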