@inproceedings{chen2020hitter,
  author = {Chen, Sanxing and Liu, Xiaodong and Gao, Jianfeng and Jiao, Jian and Zhang, Ruofei and Ji, Yangfeng},
  title = {HittER: Hierarchical Transformers for Knowledge Graph Embeddings},
  booktitle = {EMNLP 2021},
  year = {2021},
  month = {November},
  abstract = {This paper examines the challenging problem of learning representations of entities and relations in a complex multi-relational knowledge graph. We propose HittER, a Hierarchical Transformer model to jointly learn Entity-relation composition and Relational contextualization based on a source entity's neighborhood. Our proposed model consists of two different Transformer blocks: the bottom block extracts features of each entity-relation pair in the local neighborhood of the source entity, and the top block aggregates the relational information from the outputs of the bottom block. We further design a masked entity prediction task to balance information from the relational context and the source entity itself. Evaluated on the task of link prediction, our approach achieves new state-of-the-art results on two standard benchmark datasets, FB15K-237 and WN18RR.},
  url = {https://www.microsoft.com/en-us/research/publication/hitter-hierarchical-transformers-for-knowledge-graph-embeddings/},
}