@incollection{DeuflhardWeiser1997,
  author    = {Deuflhard, Peter and Weiser, Martin},
  title     = {Local inexact Newton multilevel FEM for nonlinear elliptic problems},
  booktitle = {Computational Science for the 21st Century},
  editor    = {Bristeau, M.-O. and others},
  publisher = {John Wiley \& Sons},
  address   = {Chichester},
  pages     = {129--138},
  year      = {1997},
  language  = {en}
}

@article{NiemannUramWolfetal2024,
  author   = {Niemann, Jan-Hendrik and Uram, Samuel and Wolf, Sarah and Conrad, Natasa Djurdjevac and Weiser, Martin},
  title    = {Multilevel Optimization for Policy Design with Agent-Based Epidemic Models},
  journal  = {Journal of Computational Science},
  volume   = {77},
  pages    = {102242},
  year     = {2024},
  doi      = {10.1016/j.jocs.2024.102242},
  abstract = {Epidemiological models can not only be used to forecast the course of a pandemic like COVID-19, but also to propose and design non-pharmaceutical interventions such as school and work closing. In general, the design of optimal policies leads to nonlinear optimization problems that can be solved by numerical algorithms. Epidemiological models come in different complexities, ranging from systems of simple ordinary differential equations (ODEs) to complex agent-based models (ABMs). The former allow a fast and straightforward optimization, but are limited in accuracy, detail, and parameterization, while the latter can resolve spreading processes in detail, but are extremely expensive to optimize. We consider policy optimization in a prototypical situation modeled as both ODE and ABM, review numerical optimization approaches, and propose a heterogeneous multilevel approach based on combining a fine-resolution ABM and a coarse ODE model. Numerical experiments, in particular with respect to convergence speed, are given for illustrative examples.},
  language = {en}
}

@article{SemlerWeiser2023,
  author   = {Semler, Phillip and Weiser, Martin},
  title    = {Adaptive Gaussian Process Regression for Efficient Building of Surrogate Models in Inverse Problems},
  journal  = {Inverse Problems},
  volume   = {39},
  number   = {12},
  pages    = {125003},
  year     = {2023},
  doi      = {10.1088/1361-6420/ad0028},
  abstract = {In a task where many similar inverse problems must be solved, evaluating costly simulations is impractical. Therefore, replacing the model y with a surrogate model y(s) that can be evaluated quickly leads to a significant speedup. The approximation quality of the surrogate model depends strongly on the number, position, and accuracy of the sample points. With an additional finite computational budget, this leads to a problem of (computer) experimental design. In contrast to the selection of sample points, the trade-off between accuracy and effort has hardly been studied systematically. We therefore propose an adaptive algorithm to find an optimal design in terms of position and accuracy. Pursuing a sequential design by incrementally appending the computational budget leads to a convex and constrained optimization problem. As a surrogate, we construct a Gaussian process regression model. We measure the global approximation error in terms of its impact on the accuracy of the identified parameter and aim for a uniform absolute tolerance, assuming that y(s) is computed by finite element calculations.
A priori error estimates and a coarse estimate of computational effort relate the expected improvement of the surrogate model error to computational effort, resulting in the most efficient combination of sample point and evaluation tolerance. We also allow for improving the accuracy of already existing sample points by continuing previously truncated finite element solution procedures.},
  language = {en}
}

@inproceedings{SemlerWeiserProc,
  author    = {Semler, Phillip and Weiser, Martin},
  title     = {Adaptive Gradient Enhanced Gaussian Process Surrogates for Inverse Problems},
  booktitle = {Proceedings of the MATH+ Thematic Einstein Semester on Mathematical Optimization for Machine Learning},
  abstract  = {Generating simulated training data needed for constructing sufficiently accurate surrogate models to be used for efficient optimization or parameter identification can incur a huge computational effort in the offline phase. We consider a fully adaptive greedy approach to the computational design of experiments problem using gradient-enhanced Gaussian process regression as surrogates. Designs are incrementally defined by solving an optimization problem for accuracy given a certain computational budget. We address not only the choice of evaluation points but also of required simulation accuracy, both of values and gradients of the forward model. Numerical results show a significant reduction of the computational effort compared to just position-adaptive and static designs as well as a clear benefit of including gradient information into the surrogate training.},
  language  = {en}
}