@inproceedings{nie2018operation-guided,
  author    = {Nie, Feng and Wang, Jinpeng and Yao, Jin-Ge and Pan, Rong and Lin, Chin-Yew},
  title     = {Operation-guided Neural Networks for High Fidelity Data-To-Text Generation},
  booktitle = {Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing},
  year      = {2018},
  month     = {September},
  abstract  = {Recent neural models for data-to-text generation are mostly based on data-driven, end-to-end training of encoder-decoder networks. Even though the generated texts are mostly fluent and informative, these models often produce descriptions that are inconsistent with the input structured data. This is a critical issue, especially in domains that require inference or calculations over raw data. In this paper, we attempt to improve the fidelity of neural data-to-text generation by utilizing pre-executed symbolic operations. We propose a framework called Operation-guided Attention-based sequence-to-sequence network (OpAtt), with a specifically designed gating mechanism as well as a quantization module for operation results, to utilize information from pre-executed operations. Experiments on two sports datasets show that our proposed method clearly improves the fidelity of the generated texts to the input structured data.},
  publisher = {Association for Computational Linguistics},
  pages     = {3879--3889},
  url       = {http://approjects.co.za/?big=en-us/research/publication/operation-guided-neural-networks-for-high-fidelity-data-to-text-generation/},
}