@article{SchusterConstantineSullivan,
  author = {Schuster, Ingmar and Constantine, Paul and Sullivan, T. J.},
  title = {Exact active subspace Metropolis-Hastings, with applications to the Lorenz-96 system},
  abstract = {We consider the application of active subspaces to inform a Metropolis-Hastings algorithm, thereby aggressively reducing the computational dimension of the sampling problem. We show that the original formulation, as proposed by Constantine, Kent, and Bui-Thanh (SIAM J. Sci. Comput., 38(5):A2779-A2805, 2016), possesses asymptotic bias. Using pseudo-marginal arguments, we develop an asymptotically unbiased variant. Our algorithm is applied to a synthetic multimodal target distribution as well as a Bayesian formulation of a parameter inference problem for a Lorenz-96 system.},
  language = {en}
}

@inproceedings{SchusterStrathmannPaigeetal.,
  author = {Schuster, Ingmar and Strathmann, Heiko and Paige, Brooks and Sejdinovic, Dino},
  title = {Kernel Sequential Monte Carlo},
  series = {Joint European Conference on Machine Learning and Knowledge Discovery in Databases},
  booktitle = {Joint European Conference on Machine Learning and Knowledge Discovery in Databases},
  doi = {10.1007/978-3-319-71249-9_24},
  pages = {390 -- 409},
  abstract = {We propose kernel sequential Monte Carlo (KSMC), a framework for sampling from static target densities. KSMC is a family of sequential Monte Carlo algorithms that are based on building emulator models of the current particle system in a reproducing kernel Hilbert space. We here focus on modelling nonlinear covariance structure and gradients of the target. The emulator's geometry is adaptively updated and subsequently used to inform local proposals. Unlike in adaptive Markov chain Monte Carlo, continuous adaptation does not compromise convergence of the sampler. KSMC combines the strengths of sequential Monte Carlo and kernel methods: superior performance for multimodal targets and the ability to estimate model evidence as compared to Markov chain Monte Carlo, and the emulator's ability to represent targets that exhibit high degrees of nonlinearity. As KSMC does not require access to target gradients, it is particularly applicable to targets whose gradients are unknown or prohibitively expensive. We describe necessary tuning details and demonstrate the benefits of the proposed methodology on a series of challenging synthetic and real-world examples.},
  language = {en}
}

@article{KlusSchusterMuandet,
  author = {Klus, Stefan and Schuster, Ingmar and Muandet, Krikamol},
  title = {Eigendecompositions of Transfer Operators in Reproducing Kernel Hilbert Spaces},
  series = {Journal of Machine Learning Research},
  journal = {Journal of Machine Learning Research},
  abstract = {Transfer operators such as the Perron-Frobenius or Koopman operator play an important role in the global analysis of complex dynamical systems. The eigenfunctions of these operators can be used to detect metastable sets, to project the dynamics onto the dominant slow processes, or to separate superimposed signals. We extend transfer operator theory to reproducing kernel Hilbert spaces and show that these operators are related to Hilbert space representations of conditional distributions, known as conditional mean embeddings in the machine learning community. Moreover, numerical methods to compute empirical estimates of these embeddings are akin to data-driven methods for the approximation of transfer operators such as extended dynamic mode decomposition and its variants.
In fact, most of the existing methods can be derived from our framework, providing a unifying view on the approximation of transfer operators. One main benefit of the presented kernel-based approaches is that these methods can be applied to any domain where a similarity measure given by a kernel is available. We illustrate the results with the aid of guiding examples and highlight potential applications in molecular dynamics as well as video and text data analysis.},
  language = {en}
}

@article{KlebanovSchusterSullivan,
  author = {Klebanov, Ilja and Schuster, Ingmar and Sullivan, T. J.},
  title = {A rigorous theory of conditional mean embeddings},
  series = {SIAM Journal on Mathematics of Data Science},
  volume = {2},
  journal = {SIAM Journal on Mathematics of Data Science},
  number = {3},
  doi = {10.1137/19M1305069},
  pages = {583 -- 606},
  language = {en}
}

@article{KlebanovSchuster,
  author = {Klebanov, Ilja and Schuster, Ingmar},
  title = {Markov Chain Importance Sampling - a highly efficient estimator for MCMC},
  series = {Journal of Computational and Graphical Statistics},
  journal = {Journal of Computational and Graphical Statistics},
  doi = {10.1080/10618600.2020.1826953},
  abstract = {Markov chain (MC) algorithms are ubiquitous in machine learning and statistics and many other disciplines. Typically, these algorithms can be formulated as acceptance-rejection methods. In this work we present a novel estimator applicable to these methods, dubbed Markov chain importance sampling (MCIS), which efficiently makes use of rejected proposals. For the unadjusted Langevin algorithm, it provides a novel way of correcting the discretization error. Our estimator satisfies a central limit theorem and improves on error per CPU cycle, often to a large extent. As a by-product it enables estimating the normalizing constant, an important quantity in Bayesian machine learning and statistics.},
  language = {en}
}

@article{KlusBittracherSchusteretal.,
  author = {Klus, Stefan and Bittracher, Andreas and Schuster, Ingmar and Sch{\"u}tte, Christof},
  title = {A kernel-based approach to molecular conformation analysis},
  series = {Journal of Chemical Physics},
  volume = {149},
  journal = {Journal of Chemical Physics},
  number = {24},
  doi = {10.1063/1.5063533},
  abstract = {We present a novel machine learning approach to understanding conformation dynamics of biomolecules. The approach combines kernel-based techniques that are popular in the machine learning community with transfer operator theory for analyzing dynamical systems in order to identify conformation dynamics based on molecular dynamics simulation data. We show that many of the prominent methods like Markov State Models, EDMD, and TICA can be regarded as special cases of this approach and that new efficient algorithms can be constructed based on this derivation. The results of these new powerful methods will be illustrated with several examples, in particular the alanine dipeptide and the protein NTL9.},
  language = {en}
}

@misc{MollenhauerSchusterKlusetal.,
  author = {Mollenhauer, Mattes and Schuster, Ingmar and Klus, Stefan and Sch{\"u}tte, Christof},
  title = {Singular Value Decomposition of Operators on Reproducing Kernel Hilbert Spaces},
  series = {Advances in Dynamics, Optimization and Computation. Series: Studies in Systems, Decision and Control. A volume dedicated to Michael Dellnitz on his 60th birthday},
  volume = {304},
  journal = {Advances in Dynamics, Optimization and Computation. Series: Studies in Systems, Decision and Control. A volume dedicated to Michael Dellnitz on his 60th birthday},
  editor = {Junge, Oliver and Sch{\"u}tze, O. and Froyland, Gary and Ober-Bl{\"o}baum, S. and Padberg-Gehle, K.},
  publisher = {Springer International},
  isbn = {978-3-030-51264-4},
  doi = {10.1007/978-3-030-51264-4_5},
  pages = {109 -- 131},
  language = {en}
}

@article{GelssKlusSchusteretal.,
  author = {Gel{\ss}, Patrick and Klus, Stefan and Schuster, Ingmar and Sch{\"u}tte, Christof},
  title = {Feature space approximation for kernel-based supervised learning},
  series = {Knowledge-Based Systems},
  volume = {221},
  journal = {Knowledge-Based Systems},
  publisher = {Elsevier},
  doi = {10.1016/j.knosys.2021.106935},
  language = {en}
}