@inproceedings{zhao2023pre-trained,
  author    = {Zhao, Xuandong and Ouyang, Siqi and Yu, Zhiguo and Wu, Ming and Li, Lei},
  title     = {Pre-trained Language Models Can be Fully Zero-Shot Learners},
  booktitle = {ACL 2023},
  year      = {2023},
  month     = {July},
  abstract  = {How can we extend a pre-trained model to many language understanding tasks, without labeled or additional unlabeled data? Pre-trained language models (PLMs) have been effective for a wide range of NLP tasks. However, existing approaches either require fine-tuning on downstream labeled datasets or manually constructing proper prompts. In this paper, we propose nonparametric prompting PLM (NPPrompt) for fully zero-shot language understanding. Unlike previous methods, NPPrompt uses only pre-trained language models and does not require any labeled data or additional raw corpus for further fine-tuning, nor does it rely on humans to construct a comprehensive set of prompt label words. We evaluate NPPrompt against previous major few-shot and zero-shot learning methods on diverse NLP tasks, including text classification, text entailment, similar text retrieval, and paraphrasing. Experimental results demonstrate that our NPPrompt outperforms the previous best fully zero-shot method by large margins, with absolute gains of 12.8% in accuracy on text classification and 18.9% on the GLUE benchmark.},
  url       = {https://www.microsoft.com/en-us/research/publication/pre-trained-language-models-can-be-fully-zero-shot-learners/},
}