@inproceedings{li2020zero-resource,
  author = {Li, Linxiao and Xu, Can and Wu, Wei and Zhao, Yufan and Zhao, Xueliang and Tao, Chongyang},
  title = {Zero-Resource Knowledge-Grounded Dialogue Generation},
  booktitle = {Advances in Neural Information Processing Systems 33 (NeurIPS 2020)},
  year = {2020},
  month = {July},
  abstract = {While neural conversation models have shown great potential for generating informative and engaging responses by introducing external knowledge, learning such a model often requires knowledge-grounded dialogues that are difficult to obtain. To overcome the data challenge and reduce the cost of building a knowledge-grounded dialogue system, we explore the problem under a zero-resource setting by assuming that no context-knowledge-response triples are needed for training. To this end, we propose representing both the knowledge that bridges a context and a response and the way that the knowledge is expressed as latent variables, and devise a variational approach that can effectively estimate a generation model from a dialogue corpus and a knowledge corpus that are independent of each other. Evaluation results on three benchmarks of knowledge-grounded dialogue generation indicate that our model achieves performance comparable with state-of-the-art methods that rely on knowledge-grounded dialogues for training, and exhibits good generalization ability over different topics and different datasets. Code is available at https://github.com/nlpxucan/ZRKGC.},
  url = {http://approjects.co.za/?big=en-us/research/publication/zero-resource-knowledge-grounded-dialogue-generation-2/},
}