@inproceedings{pryzant2022automatic,
  author        = {Pryzant, Reid and Yang, Ziyi and Xu, Yichong and Zhu, Chenguang and Zeng, Michael},
  title         = {Automatic Rule Induction for Efficient Semi-Supervised Learning},
  booktitle     = {Findings of Empirical Methods in Natural Language Processing ({EMNLP}), Abu Dhabi, the United Arab Emirates, 2022},
  year          = {2022},
  month         = dec,
  abstract      = {Semi-supervised learning has shown promise in allowing NLP models to generalize from small amounts of labeled data. Meanwhile, pretrained transformer models act as black-box correlation engines that are difficult to explain and sometimes behave unreliably. In this paper, we propose tackling both of these challenges via Automatic Rule Induction (ARI), a simple and general-purpose framework for the automatic discovery and integration of symbolic rules into pretrained transformer models. First, we extract weak symbolic rules from low-capacity machine learning models trained on small amounts of labeled data. Next, we use an attention mechanism to integrate these rules into high-capacity pretrained transformer models. Last, the rule-augmented system becomes part of a self-training framework to boost supervision signal on unlabeled data. These steps can be layered beneath a variety of existing weak supervision and semi-supervised NLP algorithms in order to improve performance and interpretability. Experiments across nine sequence classification and relation extraction tasks suggest that ARI can improve state-of-the-art methods with no manual effort and minimal computational overhead.},
  url           = {http://approjects.co.za/?big=en-us/research/publication/automatic-rule-induction-for-efficient-semi-supervised-learning/},
  internal-note = {NOTE(review): url looks like a redirect wrapper around the Microsoft Research publication page -- verify link resolves},
}