@phdthesis{Stochastic:2324,
      recid = {2324},
      author = {He, Jingyu},
      title = {XBART: A Scalable Stochastic Algorithm for Supervised Machine Learning with Additive Tree Ensembles},
      school = {The University of Chicago},
      type = {Ph.D. dissertation},
      year = {2020},
      month = jun,
      pages = {107},
      abstract = {This dissertation develops a novel stochastic tree ensemble method for nonlinear regression, which I refer to as XBART, short for Accelerated Bayesian Additive Regression Trees. By combining regularization and stochastic search strategies from Bayesian modeling with computationally efficient techniques from recursive partitioning approaches, the new method attains state-of-the-art performance: in many settings it is both faster and more accurate than the widely used XGBoost algorithm. Via careful simulation studies, I demonstrate that the new approach provides accurate point-wise estimates of the mean function and does so faster than popular alternatives, such as BART, XGBoost, and neural networks (using Keras). This dissertation also proves a number of basic theoretical results about the new algorithm, including consistency of the single-tree version of the model and stationarity of the Markov chain produced by the ensemble version. Furthermore, I demonstrate that initializing standard Bayesian additive regression trees Markov chain Monte Carlo (MCMC) at XBART-fitted trees considerably improves credible interval coverage and reduces total run-time.},
      url = {http://knowledge.uchicago.edu/record/2324},
      doi = {10.6082/uchicago.2324},
}