@inproceedings{schlag2019enhancing,
  author    = {Schlag, Imanol and Smolensky, Paul and Fernandez, Roland and Jojic, Nebojsa and Schmidhuber, Jürgen and Gao, Jianfeng},
  title     = {Enhancing the Transformer with Explicit Relational Encoding for Math Problem Solving},
  booktitle = {Thirty-third Conference on Neural Information Processing Systems (NeurIPS 2019)},
  year      = {2019},
  month     = {December},
  abstract  = {We incorporate Tensor-Product Representations within the Transformer in order to better support the explicit representation of relation structure. Our Tensor-Product Transformer (TP-Transformer) sets a new state of the art on the recently introduced Mathematics Dataset containing 56 categories of free-form math word problems. The essential component of the model is a novel attention mechanism, called TP-Attention, which explicitly encodes the relations between each Transformer cell and the other cells from which values have been retrieved by attention. TP-Attention goes beyond a linear combination of retrieved values, strengthening representation-building and resolving ambiguities introduced by multiple layers of standard attention. The TP-Transformer's attention maps give better insight into how it solves the Mathematics Dataset's challenging problems. Pretrained models and code will be made available after publication.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/enhancing-the-transformer-with-explicit-relational-encoding-for-math-problem-solving/},
}
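
% A minimal sketch of the TP-Attention binding step described in the abstract, assuming a
% PyTorch-style implementation; the class and parameter names (TPAttentionHead, d_model,
% d_head) are illustrative assumptions, not the authors' released code. Each head performs
% standard scaled dot-product attention and then binds the retrieved value (the "filler")
% to a learned relation vector via an elementwise Hadamard product, a compressed stand-in
% for a full tensor-product binding.
%
%   import math
%   import torch
%   import torch.nn as nn
%   import torch.nn.functional as F
%
%   class TPAttentionHead(nn.Module):
%       # One TP-Attention head (illustrative sketch): ordinary attention
%       # whose output is bound to a relation vector by elementwise product.
%       def __init__(self, d_model: int, d_head: int):
%           super().__init__()
%           self.d_head = d_head
%           self.q = nn.Linear(d_model, d_head)   # queries
%           self.k = nn.Linear(d_model, d_head)   # keys
%           self.v = nn.Linear(d_model, d_head)   # values ("fillers")
%           self.r = nn.Linear(d_model, d_head)   # relation ("role") vectors
%
%       def forward(self, x: torch.Tensor) -> torch.Tensor:
%           # x: (batch, seq_len, d_model)
%           q, k, v, r = self.q(x), self.k(x), self.v(x), self.r(x)
%           scores = q @ k.transpose(-2, -1) / math.sqrt(self.d_head)
%           attn = F.softmax(scores, dim=-1)
%           filler = attn @ v   # standard attention: linear combination of values
%           return filler * r   # TP binding: pair each filler with its relation
%
%   # Usage: head outputs keep shape (batch, seq_len, d_head)
%   x = torch.randn(2, 5, 64)
%   head = TPAttentionHead(d_model=64, d_head=16)
%   out = head(x)   # (2, 5, 16)
%
% In a full model, multiple such heads would presumably be concatenated and projected
% back to d_model, as in a standard multi-head Transformer layer.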