@inproceedings{auli2013joint,
  author    = {Auli, Michael and Galley, Michel and Quirk, Chris and Zweig, Geoffrey},
  title     = {Joint Language and Translation Modeling with Recurrent Neural Networks},
  booktitle = {Proc. of {EMNLP}},
  year      = {2013},
  month     = oct,
  abstract  = {We present a joint language and translation model based on a recurrent neural network which predicts target words based on an unbounded history of both source and target words. The weaker independence assumptions of this model result in a vastly larger search space compared to related feed forward-based language or translation models. We tackle this issue with a new lattice rescoring algorithm and demonstrate its effectiveness empirically. Our joint model builds on a well known recurrent neural network language model (Mikolov, 2012) augmented by a layer of additional inputs from the source language. We show competitive accuracy compared to the traditional channel model features. Our best results improve the output of a system trained on WMT2012 French-English data by up to 1.5 BLEU, and by 1.1 BLEU on average across several test sets.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/joint-language-and-translation-modeling-with-recurrent-neural-networks/},
}