@techreport{qin2013efficient,
  author      = {Qin, Zhen and Petricek, Vaclav and Karampatziakis, Nikos and Li, Lihong and Langford, John},
  title       = {Efficient Online Bootstrapping for Large Scale Learning},
  institution = {Microsoft},
  year        = {2013},
  month       = {December},
  abstract    = {Bootstrapping is a useful technique for estimating the uncertainty of a predictor, for example, confidence intervals for prediction. It is typically used on small to moderate sized datasets, due to its high computation cost. This work describes a highly scalable online bootstrapping strategy, implemented inside Vowpal Wabbit, that is several times faster than traditional strategies. Our experiments indicate that, in addition to providing a black box-like method for estimating uncertainty, our implementation of online bootstrapping may also help to train models with better prediction performance due to model averaging.},
  url         = {https://www.microsoft.com/en-us/research/publication/efficient-online-bootstrapping-for-large-scale-learning/},
  number      = {MSR-TR-2013-132},
  note        = {Presented at the Big Learning Workshop at the 2013 Neural Information Processing Systems Conference.},
}