@inproceedings{xu2020discourse-aware,
  author    = {Xu, Jiacheng and Gan, Zhe and Cheng, Yu and Liu, Jingjing},
  title     = {Discourse-Aware Neural Extractive Text Summarization},
  booktitle = {Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics (ACL 2020)},
  publisher = {Association for Computational Linguistics},
  year      = {2020},
  month     = {July},
  abstract  = {Recently, BERT has been adopted for document encoding in state-of-the-art text summarization models. However, sentence-based extractive models often produce redundant or uninformative phrases in the extracted summaries. Moreover, long-range dependencies throughout a document are not well captured by BERT, which is pre-trained on sentence pairs rather than full documents. To address these issues, we present DiscoBERT, a discourse-aware neural summarization model. DiscoBERT extracts sub-sentential discourse units (instead of sentences) as candidates for extractive selection at a finer granularity. To capture the long-range dependencies among discourse units, structural discourse graphs are constructed based on RST trees and coreference mentions, and encoded with Graph Convolutional Networks. Experiments show that the proposed model outperforms other BERT-base state-of-the-art methods by a significant margin on popular summarization benchmarks.},
  url       = {https://www.microsoft.com/en-us/research/publication/discourse-aware-neural-extractive-text-summarization/},
}