@inproceedings{yu2022jaket,
  author    = {Yu, Donghan and Zhu, Chenguang and Yang, Yiming and Zeng, Michael},
  title     = {{JAKET}: Joint Pre-training of Knowledge Graph and Language Understanding},
  booktitle = {AAAI 2022},
  year      = {2022},
  month     = {February},
  abstract  = {Knowledge graphs (KGs) contain rich information about world knowledge, entities and relations.
               Thus, they can be great supplements to existing pre-trained language models. However, it remains
               a challenge to efficiently integrate information from KGs into language modeling, and understanding
               a knowledge graph in turn requires related context. We propose a novel joint pre-training framework,
               JAKET, to model both the knowledge graph and language. The knowledge module and language module
               provide essential information to mutually assist each other: the knowledge module produces
               embeddings for entities in text, while the language module generates context-aware initial
               embeddings for entities and relations in the graph. Our design enables the pre-trained model to
               easily adapt to unseen knowledge graphs in new domains. Experimental results on several
               knowledge-aware NLP tasks show that our proposed framework achieves superior performance by
               effectively leveraging knowledge in language understanding.},
  url       = {https://www.microsoft.com/en-us/research/publication/jaket-joint-pre-training-of-knowledge-graph-and-language-understanding/},
}
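
For intuition, here is a minimal, hypothetical PyTorch sketch of the mutual-assistance loop the abstract describes: the language module builds context-aware initial embeddings for entities, and the knowledge module feeds refined entity embeddings back into the text encoding. This is not the authors' implementation; the module internals, the mean-pooling of entity descriptions, the toy adjacency matrix, and the add-at-mention injection are all simplifying assumptions.

import torch
import torch.nn as nn

torch.manual_seed(0)

class LanguageModule(nn.Module):
    # Stand-in for JAKET's language module (the paper uses a pre-trained LM;
    # this toy version is an embedding layer plus one transformer encoder layer).
    def __init__(self, vocab_size, dim):
        super().__init__()
        self.tok = nn.Embedding(vocab_size, dim)
        self.enc = nn.TransformerEncoderLayer(d_model=dim, nhead=4, batch_first=True)

    def forward(self, token_ids):
        return self.enc(self.tok(token_ids))

class KnowledgeModule(nn.Module):
    # Stand-in for JAKET's knowledge module (the paper uses a GNN over the KG;
    # this toy version does a single round of neighbor aggregation).
    def __init__(self, dim):
        super().__init__()
        self.proj = nn.Linear(dim, dim)

    def forward(self, entity_emb, adj):
        # adj: row-normalized (num_entities x num_entities) adjacency matrix.
        return torch.relu(self.proj(adj @ entity_emb))

vocab, dim, n_ent = 100, 32, 4
lm, km = LanguageModule(vocab, dim), KnowledgeModule(dim)

with torch.no_grad():
    # 1) Language module -> knowledge module: encode each entity's textual
    #    description and mean-pool it into a context-aware initial embedding.
    desc_ids = torch.randint(0, vocab, (n_ent, 8))   # 4 entities, 8 tokens each
    init_ent = lm(desc_ids).mean(dim=1)              # (n_ent, dim)

    # 2) Knowledge module refines the entity embeddings over the KG structure.
    adj = torch.eye(n_ent)                           # trivial self-loop graph
    ent_emb = km(init_ent, adj)                      # (n_ent, dim)

    # 3) Knowledge module -> language module: add an entity's embedding to the
    #    token embedding at its mention position before re-encoding the text.
    sent_ids = torch.randint(0, vocab, (1, 10))
    tok_emb = lm.tok(sent_ids).clone()
    tok_emb[0, 3] += ent_emb[0]                      # entity 0 mentioned at position 3
    knowledge_aware = lm.enc(tok_emb)
    print(knowledge_aware.shape)                     # torch.Size([1, 10, 32])

In the paper the two modules are pre-trained jointly with language-modeling and KG objectives; the sketch above only shows a single forward exchange of embeddings between them.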