@inproceedings{ding2021levenshtein,
  author    = {Ding, Shuoyang and Junczys-Dowmunt, Marcin and Post, Matt and Koehn, Philipp},
  title     = {{Levenshtein} Training for Word-level Quality Estimation},
  booktitle = {Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing},
  year      = {2021},
  month     = sep,
  abstract  = {We propose a novel scheme to use the Levenshtein Transformer to perform the task of word-level quality estimation. A Levenshtein Transformer is a natural fit for this task: trained to perform decoding in an iterative manner, a Levenshtein Transformer can learn to post-edit without explicit supervision. To further minimize the mismatch between the translation task and the word-level QE task, we propose a two-stage transfer learning procedure on both augmented data and human post-editing data. We also propose heuristics to construct reference labels that are compatible with subword-level finetuning and inference. Results on WMT 2020 QE shared task dataset show that our proposed method has superior data efficiency under the data-constrained setting and competitive performance under the unconstrained setting.},
  url       = {https://www.microsoft.com/en-us/research/publication/levenshtein-training-for-word-level-quality-estimation/},
}