@inproceedings{zuo2021arch,
  author    = {Zuo, Simiao and Liang, Chen and Jiang, Haoming and He, Pengcheng and Liu, Xiaodong and Gao, Jianfeng and Chen, Weizhu and Zhao, Tuo},
  title     = {ARCH: Efficient Adversarial Regularized Training with Caching},
  booktitle = {2021 Conference on Empirical Methods in Natural Language Processing},
  year      = {2021},
  month     = {September},
  abstract  = {Adversarial regularization can improve model generalization in many natural language processing tasks. However, conventional approaches are computationally expensive, since they need to generate a perturbation for each sample in each epoch. We propose a new adversarial regularization method, ARCH (adversarial regularization with caching), where perturbations are generated and cached once every several epochs. Because caching all the perturbations raises memory-usage concerns, we adopt a K-nearest-neighbors-based strategy to tackle this issue. The strategy only requires caching a small number of perturbations, without introducing additional training time. We evaluate our proposed method on a set of neural machine translation and natural language understanding tasks. We observe that ARCH significantly eases the computational burden (saving up to 70\% of computational time in comparison with conventional approaches). More surprisingly, by reducing the variance of stochastic gradients, ARCH yields notably better (in most of the tasks) or comparable model generalization. Our code is publicly available.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/arch-efficient-adversarial-regularized-training-with-caching/},
}
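
The abstract describes the core idea of ARCH: regenerate adversarial perturbations only once every several epochs and reuse cached perturbations in between, combined with an adversarial regularization term. The sketch below is an illustrative assumption of how such epoch-level caching could look in PyTorch, not the authors' released implementation; all names (generate_perturbation, refresh_every, perturb_cache, the (idx, x, y) loader format, the KL-based regularizer) are hypothetical, and the paper's K-nearest-neighbors strategy for bounding cache size is omitted.

import torch
import torch.nn.functional as F

def generate_perturbation(model, x, y, epsilon=1e-2):
    # One-step gradient-based perturbation of the inputs (sketch only).
    x = x.clone().detach().requires_grad_(True)
    loss = F.cross_entropy(model(x), y)
    grad, = torch.autograd.grad(loss, x)
    # Normalize the gradient so the perturbation has a bounded norm.
    return epsilon * grad / (grad.norm(dim=-1, keepdim=True) + 1e-12)

def train(model, loader, optimizer, epochs=10, refresh_every=3, lam=1.0):
    # Hypothetical cache: sample index -> cached perturbation.
    # The paper instead caches a small set via a KNN-based strategy.
    perturb_cache = {}
    for epoch in range(epochs):
        refresh = (epoch % refresh_every == 0)
        for idx, x, y in loader:  # loader is assumed to yield sample indices
            if refresh:
                delta = generate_perturbation(model, x, y)
                for i, d in zip(idx.tolist(), delta):
                    perturb_cache[i] = d.detach()
            else:
                # Reuse cached perturbations instead of recomputing them.
                delta = torch.stack([perturb_cache[i] for i in idx.tolist()])
            clean_logits = model(x)
            adv_logits = model(x + delta)
            # Task loss plus an adversarial regularizer that keeps the
            # perturbed predictions close to the clean predictions.
            loss = F.cross_entropy(clean_logits, y) + lam * F.kl_div(
                F.log_softmax(adv_logits, dim=-1),
                F.softmax(clean_logits, dim=-1),
                reduction="batchmean",
            )
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

The point of the sketch is the control flow: the expensive inner gradient computation runs only on refresh epochs, which is where the reported savings in computational time would come from.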