@inproceedings{phang2023hypertuning,
  author    = {Phang, Jason and Mao, Yi and He, Pengcheng and Chen, Weizhu},
  title     = {HyperTuning: Toward Adapting Large Language Models without Back-propagation},
  booktitle = {ICML 2023},
  year      = {2023},
  month     = {June},
  abstract  = {Fine-tuning large language models for different tasks can be costly and inefficient, and even methods that reduce the number of tuned parameters still require full gradient-based optimization. We propose HyperTuning, a novel approach to model adaptation that uses a hypermodel to generate task-specific parameters for a fixed downstream model. We demonstrate a simple setup for hypertuning with HyperT5, a T5-based hypermodel that produces soft prefixes or LoRA parameters for a frozen T5 model from few-shot examples. We train HyperT5 in two stages: first, hyperpretraining with a modified conditional language modeling objective that trains a hypermodel to generate parameters; second, multi-task fine-tuning (MTF) on a large number of diverse language tasks. We evaluate HyperT5 on P3, MetaICL and Super-NaturalInstructions datasets, and show that it can effectively generate parameters for unseen tasks. Moreover, we show that using hypermodel-generated parameters as initializations for further parameter-efficient fine-tuning improves performance. HyperTuning can thus be a flexible and efficient way to leverage large language models for diverse downstream applications.},
  url       = {http://approjects.co.za/?big=en-us/research/publication/hypertuning-toward-adapting-large-language-models-without-back-propagation/},
}