@inproceedings{xu2023small,
  author    = {Xu, Canwen and Xu, Yichong and Wang, Shuohang and Liu, Yang and Zhu, Chenguang and McAuley, Julian},
  title     = {Small Models are Valuable Plug-ins for Large Language Models},
  year      = {2023},
  month     = {May},
  abstract  = {Large language models (LLMs) such as GPT-3 and GPT-4 are powerful, but their weights are often publicly unavailable and their immense sizes make the models difficult to tune on common hardware. As a result, effectively tuning these models with large-scale supervised data can be challenging. As an alternative, In-Context Learning (ICL) can use only a small number of supervised examples due to context length limits. In this paper, we propose Super In-Context Learning (SuperICL), which allows black-box LLMs to work with locally fine-tuned smaller models, resulting in superior performance on supervised tasks. Our experiments demonstrate that SuperICL can improve performance beyond state-of-the-art fine-tuned models while addressing the instability problem of in-context learning. Furthermore, SuperICL can enhance the capabilities of smaller models, such as multilinguality and interpretability.},
  publisher = {arXiv 2305.08848},
  url       = {http://approjects.co.za/?big=en-us/research/publication/small-models-are-valuable-plug-ins-for-large-language-models/},
}
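
A minimal sketch of how a SuperICL-style prompt might be assembled, based only on the abstract's description of pairing a black-box LLM with a locally fine-tuned "plug-in" model; the function and parameter names (build_supericl_prompt, plugin_predict, query_llm) are illustrative assumptions, not the authors' actual API.

# Hypothetical SuperICL-style prompt construction (Python sketch).
# A small, locally fine-tuned classifier annotates each in-context example
# and the test input with its prediction and confidence; the black-box LLM
# then produces the final label.

def build_supericl_prompt(demos, test_input, plugin_predict):
    """demos: list of (text, gold_label); plugin_predict: text -> (label, confidence)."""
    lines = []
    for text, gold in demos:
        pred, conf = plugin_predict(text)
        lines.append(f"Input: {text}")
        lines.append(f"Plug-in prediction: {pred} (confidence: {conf:.2f})")
        lines.append(f"Label: {gold}")
        lines.append("")
    pred, conf = plugin_predict(test_input)
    lines.append(f"Input: {test_input}")
    lines.append(f"Plug-in prediction: {pred} (confidence: {conf:.2f})")
    lines.append("Label:")
    return "\n".join(lines)

# Usage (assumed names): final_label = query_llm(build_supericl_prompt(demos, x, small_model.predict))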