@inproceedings{sun2022stylized,
  author    = {Sun, Qingfeng and Xu, Can and Hu, Huang and Wang, Yujing and Miao, Jian and Geng, Xiubo and Chen, Yining and Xu, Fei and Jiang, Daxin},
  title     = {Stylized Knowledge-Grounded Dialogue Generation via Disentangled Template Rewriting},
  booktitle = {Proceedings of the 2022 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies (NAACL 2022)},
  year      = {2022},
  month     = {July},
  abstract  = {Current Knowledge-Grounded Dialogue Generation (KDG) models specialize in producing rational and factual responses. However, to establish long-term relationships with users, a KDG model needs the capability to generate responses in a desired style or with a desired attribute. We therefore study a new problem: Stylized Knowledge-Grounded Dialogue Generation (SKDG). It presents two challenges: (1) how to train an SKDG model when no $\langle$context, knowledge, stylized response$\rangle$ triples are available, and (2) how to cohere with the context and preserve the knowledge when generating a stylized response. In this paper, we propose a novel disentangled template rewriting (DTR) method that generates responses by combining disentangled style templates (from a monolingual stylized corpus) and content templates (from a KDG corpus). The entire framework is end-to-end differentiable and is learned without supervision. Extensive experiments on two benchmarks indicate that DTR achieves a significant improvement on all evaluation metrics over previous state-of-the-art stylized dialogue generation methods. Moreover, DTR achieves performance comparable with state-of-the-art KDG methods in the standard KDG evaluation setting.},
  url       = {https://www.microsoft.com/en-us/research/publication/stylized-knowledge-grounded-dialogue-generation-via-disentangled-template-rewriting/},
}