@article{WangCockayneChkrebtiietal.,
  author   = {Wang, Junyang and Cockayne, Jon and Chkrebtii, Oksana and Sullivan, T. J. and Oates, Chris},
  title    = {Bayesian numerical methods for nonlinear partial differential equations},
  journal  = {Statistics and Computing},
  volume   = {31},
  number   = {5},
  doi      = {10.1007/s11222-021-10030-w},
  language = {en}
}

@inproceedings{TeymurLieSullivanetal.,
  author    = {Teymur, Onur and Lie, Han Cheng and Sullivan, T. J. and Calderhead, Ben},
  title     = {Implicit probabilistic integrators for ODEs},
  booktitle = {Advances in Neural Information Processing Systems 31 (NIPS 2018)},
  language  = {en}
}

@book{SullivanIntroUQ,
  author    = {Sullivan, T. J.},
  title     = {Introduction to Uncertainty Quantification},
  volume    = {63},
  publisher = {Springer},
  isbn      = {978-3-319-23394-9},
  doi       = {10.1007/978-3-319-23395-6},
  language  = {en}
}

@article{SullivanBayesianCGDiscussion,
  author   = {Sullivan, T. J.},
  title    = {Contributed discussion on the article "A Bayesian conjugate gradient method"},
  journal  = {Bayesian Analysis},
  volume   = {14},
  number   = {3},
  doi      = {10.1214/19-BA1145},
  pages    = {985--989},
  abstract = {The recent article "A Bayesian conjugate gradient method" by Cockayne, Oates, Ipsen, and Girolami proposes an approximately Bayesian iterative procedure for the solution of a system of linear equations, based on the conjugate gradient method, that gives a sequence of Gaussian/normal estimates for the exact solution. The purpose of the probabilistic enrichment is that the covariance structure is intended to provide a posterior measure of uncertainty or confidence in the solution mean. This note gives some comments on the article, poses some questions, and suggests directions for further research.},
  language = {en}
}

@inproceedings{SullivanStablePriorsGAMM,
  author    = {Sullivan, T. J.},
  title     = {Well-posedness of Bayesian inverse problems in quasi-Banach spaces with stable priors},
  booktitle = {88th Annual Meeting of the International Association of Applied Mathematics and Mechanics (GAMM), Weimar 2017},
  volume    = {17},
  number    = {1},
  doi       = {10.1002/pamm.201710402},
  pages     = {871--874},
  language  = {en}
}

@article{SullivanHeavyTailedStablePriors,
  author   = {Sullivan, T. J.},
  title    = {Well-posed Bayesian inverse problems and heavy-tailed stable quasi-Banach space priors},
  journal  = {Inverse Problems and Imaging},
  volume   = {11},
  number   = {5},
  doi      = {10.3934/ipi.2017040},
  pages    = {857--874},
  language = {en}
}

@article{SchaeferSullivanOwhadi,
  author   = {Sch{\"a}fer, Florian and Sullivan, T. J. and Owhadi, Houman},
  title    = {Compression, inversion, and approximate PCA of dense kernel matrices at near-linear computational complexity},
  journal  = {Multiscale Modeling and Simulation},
  volume   = {19},
  number   = {2},
  doi      = {10.1137/19M129526X},
  pages    = {688--730},
  language = {en}
}

@article{SchusterConstantineSullivan,
  author   = {Schuster, Ingmar and Constantine, Paul and Sullivan, T. J.},
  title    = {Exact active subspace Metropolis-Hastings, with applications to the Lorenz-96 system},
  abstract = {We consider the application of active subspaces to inform a Metropolis-Hastings algorithm, thereby aggressively reducing the computational dimension of the sampling problem. We show that the original formulation, as proposed by Constantine, Kent, and Bui-Thanh (SIAM J. Sci. Comput., 38(5):A2779-A2805, 2016), possesses asymptotic bias. Using pseudo-marginal arguments, we develop an asymptotically unbiased variant. Our algorithm is applied to a synthetic multimodal target distribution as well as a Bayesian formulation of a parameter inference problem for a Lorenz-96 system.},
  language = {en}
}

@article{OwhadiScovelSullivan,
  author   = {Owhadi, Houman and Scovel, Clint and Sullivan, T. J.},
  title    = {On the Brittleness of Bayesian Inference},
  journal  = {SIAM Review},
  volume   = {57},
  number   = {4},
  doi      = {10.1137/130938633},
  pages    = {566--582},
  abstract = {With the advent of high-performance computing, Bayesian methods are becoming increasingly popular tools for the quantification of uncertainty throughout science and industry. Since these methods can impact the making of sometimes critical decisions in increasingly complicated contexts, the sensitivity of their posterior conclusions with respect to the underlying models and prior beliefs is a pressing question to which there currently exist positive and negative answers. We report new results suggesting that, although Bayesian methods are robust when the number of possible outcomes is finite or when only a finite number of marginals of the data-generating distribution are unknown, they could be generically brittle when applied to continuous systems (and their discretizations) with finite information on the data-generating distribution. If closeness is defined in terms of the total variation (TV) metric or the matching of a finite system of generalized moments, then (1) two practitioners who use arbitrarily close models and observe the same (possibly arbitrarily large amount of) data may reach opposite conclusions; and (2) any given prior and model can be slightly perturbed to achieve any desired posterior conclusion. The mechanism causing brittleness/robustness suggests that learning and robustness are antagonistic requirements, which raises the possibility of a missing stability condition when using Bayesian inference in a continuous world under finite information.},
  language = {en}
}

@article{OatesSullivan,
  author   = {Oates, Chris and Sullivan, T. J.},
  title    = {A modern retrospective on probabilistic numerics},
  journal  = {Statistics and Computing},
  volume   = {29},
  number   = {6},
  doi      = {10.1007/s11222-019-09902-z},
  pages    = {1335--1351},
  abstract = {This article attempts to place the emergence of probabilistic numerics as a mathematical-statistical research field within its historical context and to explore how its gradual development can be related to modern formal treatments and applications. We highlight in particular the parallel contributions of Sul'din and Larkin in the 1960s and how their pioneering early ideas have reached a degree of maturity in the intervening period, mediated by paradigms such as average-case analysis and information-based complexity. We provide a subjective assessment of the state of research in probabilistic numerics and highlight some difficulties to be addressed by future work.},
  language = {en}
}