@article{crawford2018bayesian,
  author        = {Crawford, Lorin and Wood, Kris C. and Zhou, Xiang and Mukherjee, Sayan},
  title         = {{Bayesian} Approximate Kernel Regression with Variable Selection},
  journal       = {Journal of the American Statistical Association},
  year          = {2018},
  month         = jun,
  volume        = {113},
  number        = {524},
  pages         = {1710--1721},
  url           = {https://www.microsoft.com/en-us/research/publication/bayesian-approximate-kernel-regression-with-variable-selection/},
  abstract      = {Nonlinear kernel regression models are often used in statistics and machine learning because they are more accurate than linear models. Variable selection for kernel regression models is a challenge partly because, unlike the linear regression setting, there is no clear concept of an effect size for regression coefficients. In this paper, we propose a novel framework that provides an effect size analog for each explanatory variable in Bayesian kernel regression models when the kernel is shift-invariant --- for example, the Gaussian kernel. We use function analytic properties of shift-invariant reproducing kernel Hilbert spaces (RKHS) to define a linear vector space that: (i) captures nonlinear structure, and (ii) can be projected onto the original explanatory variables. This projection onto the original explanatory variables serves as an analog of effect sizes. The specific function analytic property we use is that shift-invariant kernel functions can be approximated via random Fourier bases. Based ...},
  internal-note = {NOTE(review): abstract is truncated by the exporting tool ("Based ..."); URL reconstructed from a scraper-mangled host (original: approjects.co.za redirect) -- verify link and consider adding the DOI from the publisher page.},
}