@article{shalyminov2020hybrid,
  author   = {Shalyminov, Igor and Sordoni, Alessandro and Atkinson, Adam and Schulz, Hannes},
  title    = {Hybrid Generative-Retrieval Transformers for Dialogue Domain Adaptation},
  year     = {2020},
  month    = {March},
  abstract = {Domain adaptation has recently become a key problem in dialogue systems research. Deep learning, while being the preferred technique for modeling such systems, works best given massive training data. However, in the real-world scenario, such resources aren't available for every new domain, so the ability to train with a few dialogue examples can be considered essential. Pre-training on large data sources and adapting to the target data has become the standard method for few-shot problems within the deep learning framework. In this paper, we present the winning entry at the fast domain adaptation task of DSTC-8, a hybrid generative-retrieval model based on GPT-2 fine-tuned to the multi-domain MetaLWOz dataset. Robust and diverse in response generation, our model uses retrieval logic as a fallback, being SoTA on MetaLWOz in human evaluation (>4% improvement over the 2nd place system) and attaining competitive generalization performance in adaptation to the unseen MultiWOZ dataset.},
  url      = {http://approjects.co.za/?big=en-us/research/publication/hybrid-generative-retrieval-transformers-for-dialogue-domain-adaptation/},
  journal  = {arXiv preprint arXiv:2003.01680},
}