@inproceedings{singh2023tree,
  author    = {Singh, Chandan and Gao, Jianfeng},
  title     = {Tree Prompting: Efficient Task Adaptation without Fine-Tuning},
  booktitle = {EMNLP},
  year      = {2023},
  month     = {November},
  abstract  = {Prompting language models (LMs) is the main interface for applying them to new tasks. However, for smaller LMs, prompting provides low accuracy compared to gradient-based fine-tuning. Tree Prompting is an approach to prompting that builds a decision tree of prompts, linking multiple LM calls together to solve a task. At inference time, each call to the LM is determined by efficiently routing the outcome of the previous call using the tree. Experiments on classification datasets show that Tree Prompting improves accuracy over competing methods and is competitive with fine-tuning. We also show that variants of Tree Prompting allow inspection of a model's decision-making process.},
  url       = {https://www.microsoft.com/en-us/research/publication/tree-prompting-efficient-task-adaptation-without-fine-tuning/},
}
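
A minimal sketch of the inference-time routing the abstract describes: a decision tree whose internal nodes are prompts and whose leaves are class labels, with each LM call's outcome choosing the next branch. The node structure, the yes/no routing rule, and the names PromptNode and classify are illustrative assumptions, not the authors' released implementation.

```python
# Hypothetical sketch of tree-of-prompts inference; not the paper's code.
from dataclasses import dataclass
from typing import Callable, Union


@dataclass
class PromptNode:
    """Internal node: a prompt whose LM answer routes the input left or right."""
    prompt: str                       # prompt template applied to the input text
    left: Union["PromptNode", str]    # branch taken when the LM answers "yes"
    right: Union["PromptNode", str]   # branch taken otherwise; leaves are labels


def classify(text: str, node: Union[PromptNode, str],
             lm: Callable[[str], str]) -> str:
    """Route `text` through the prompt tree, issuing one LM call per node visited."""
    while isinstance(node, PromptNode):
        answer = lm(node.prompt.format(text=text)).strip().lower()
        node = node.left if answer.startswith("yes") else node.right
    return node  # a leaf holds the predicted class label


# Usage with a stand-in LM callable (replace the lambda with a real model call):
tree = PromptNode(
    prompt="Is the following review positive? Answer yes or no.\n{text}",
    left="positive",
    right="negative",
)
print(classify("Great movie!", tree, lm=lambda p: "yes"))
```

Because routing stops as soon as a leaf is reached, the number of LM calls per example is bounded by the tree depth rather than the total number of prompts, which is the efficiency argument made in the abstract.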