@article{NiemannKlusSchuette,
  author   = {Niemann, Jan-Hendrik and Klus, Stefan and Sch{\"u}tte, Christof},
  title    = {Data-driven model reduction of agent-based systems using the Koopman generator},
  series   = {PLOS ONE},
  volume   = {16},
  journal  = {PLOS ONE},
  number   = {5},
  doi      = {10.1371/journal.pone.0250970},
  abstract = {The dynamical behavior of social systems can be described by agent-based models. Although single agents follow easily explainable rules, complex time-evolving patterns emerge due to their interaction. The simulation and analysis of such agent-based models, however, is often prohibitively time-consuming if the number of agents is large. In this paper, we show how Koopman operator theory can be used to derive reduced models of agent-based systems using only simulation or real-world data. Our goal is to learn coarse-grained models and to represent the reduced dynamics by ordinary or stochastic differential equations. The new variables are, for instance, aggregated state variables of the agent-based model, modeling the collective behavior of larger groups or the entire population. Using benchmark problems with known coarse-grained models, we demonstrate that the obtained reduced systems are in good agreement with the analytical results, provided that the number of agents is sufficiently large.},
  language = {en}
}

@article{BittracherKlusHamzietal.,
  author   = {Bittracher, Andreas and Klus, Stefan and Hamzi, Boumediene and Sch{\"u}tte, Christof},
  title    = {Dimensionality Reduction of Complex Metastable Systems via Kernel Embeddings of Transition Manifolds},
  series   = {Journal of Nonlinear Science},
  volume   = {31},
  journal  = {Journal of Nonlinear Science},
  doi      = {10.1007/s00332-020-09668-z},
  abstract = {We present a novel kernel-based machine learning algorithm for identifying the low-dimensional geometry of the effective dynamics of high-dimensional multiscale stochastic systems. Recently, the authors developed a mathematical framework for the computation of optimal reaction coordinates of such systems that is based on learning a parameterization of a low-dimensional transition manifold in a certain function space. In this article, we enhance this approach by embedding and learning this transition manifold in a reproducing kernel Hilbert space, exploiting the favorable properties of kernel embeddings. Under mild assumptions on the kernel, the manifold structure is shown to be preserved under the embedding, and distortion bounds can be derived. This leads to a more robust and more efficient algorithm compared to the previous parameterization approaches.},
  language = {en}
}

@article{GelssKlusSchusteretal.,
  author    = {Gelss, Patrick and Klus, Stefan and Schuster, Ingmar and Sch{\"u}tte, Christof},
  title     = {Feature space approximation for kernel-based supervised learning},
  series    = {Knowledge-Based Systems},
  volume    = {221},
  journal   = {Knowledge-Based Systems},
  publisher = {Elsevier},
  doi       = {10.1016/j.knosys.2021.106935},
  language  = {en}
}

@misc{NiemannSchuetteKlus,
  author   = {Niemann, Jan-Hendrik and Sch{\"u}tte, Christof and Klus, Stefan},
  title    = {Simulation data: Data-driven model reduction of agent-based systems using the Koopman generator},
  series   = {PLOS ONE},
  volume   = {16},
  journal  = {PLOS ONE},
  number   = {5},
  doi      = {10.5281/zenodo.4522119},
  language = {en}
}