@inproceedings{miao2019degnn,
  author    = {Miao, Xupeng and G{\"u}rel, Nezihe Merve and Zhang, Wentao and Han, Zhichao and Li, Bo and Min, Wei and Rao, Xi and Ren, Hansheng and Shan, Yinan and Shao, Yingxia and Wang, Yujie and Wu, Fan and Xue, Hui and Yang, Yaming and Zhang, Zitao and Zhao, Yang and Zhang, Shuai and Wang, Yujing and Cui, Bin and Zhang, Ce},
  title     = {{DeGNN}: Characterizing and Improving Graph Neural Networks with Graph Decomposition},
  booktitle = {Proceedings of the 27th {ACM} {SIGKDD} Conference on Knowledge Discovery and Data Mining ({KDD} '21)},
  year      = {2021},
  month     = aug,
  abstract  = {Despite the wide application of Graph Convolutional Network (GCN), one major limitation is that it does not benefit from the increasing depth and suffers from the oversmoothing problem. In this work, we first characterize this phenomenon from the information-theoretic perspective and show that under certain conditions, the mutual information between the output after $l$ layers and the input of GCN converges to 0 exponentially with respect to $l$. We also show that, on the other hand, graph decomposition can potentially weaken the condition of such convergence rate, which enabled our analysis for GraphCNN. While different graph structures can only benefit from the corresponding decomposition, in practice, we propose an automatic connectivity-aware graph decomposition algorithm, DeGNN, to improve the performance of general graph neural networks. Extensive experiments on widely adopted benchmark datasets demonstrate that DeGNN can not only significantly boost the performance of corresponding GNNs, but also achieves the state-of-the-art performances.},
  url       = {https://www.microsoft.com/en-us/research/publication/degnn-characterizing-and-improving-graph-neural-networks-with-graph-decomposition/},
}