@article{Weiser2009a,
  author = {Weiser, Martin},
  title = {Optimization and Identification in Regional Hyperthermia},
  journal = {Int. J. Appl. Electromagn. and Mech.},
  volume = {30},
  pages = {265--275},
  year = {2009},
  language = {en}
}

@article{Weiser2009b,
  author = {Weiser, Martin},
  title = {Pointwise Nonlinear Scaling for Reaction-Diffusion Equations},
  journal = {Appl. Num. Math.},
  volume = {59},
  number = {8},
  pages = {1858--1869},
  year = {2009},
  language = {en}
}

@article{WeiserDeuflhardErdmann2007,
  author = {Weiser, Martin and Deuflhard, Peter and Erdmann, Bodo},
  title = {Affine conjugate adaptive Newton methods for nonlinear elastomechanics},
  journal = {Opt. Meth. Softw.},
  volume = {22},
  number = {3},
  pages = {413--431},
  year = {2007},
  language = {en}
}

@article{WeihrauchWustWeiseretal.2007,
  author = {Weihrauch, Mirko and Wust, Peter and Weiser, Martin and Nadobny, Johanna and Eisenhardt, Steffen and Budach, Volker and Gellermann, Johanna},
  title = {Adaptation of antenna profiles for control of MR guided hyperthermia (HT) in a hybrid MR-HT system},
  journal = {Medical Physics},
  volume = {34},
  number = {12},
  pages = {4717--4725},
  year = {2007},
  language = {en}
}

@article{Weiser2005,
  author = {Weiser, Martin},
  title = {Interior point methods in function space},
  journal = {SIAM J. Control Optimization},
  volume = {44},
  number = {5},
  pages = {1766--1786},
  year = {2005},
  language = {en}
}

@article{SchenklMuggenthalerHubigetal.2017,
  author = {Schenkl, Sebastian and Muggenthaler, Holger and Hubig, Michael and Erdmann, Bodo and Weiser, Martin and Zachow, Stefan and Heinrich, Andreas and G{\"u}ttler, Felix Victor and Teichgr{\"a}ber, Ulf and Mall, Gita},
  title = {Automatic CT-based finite element model generation for temperature-based death time estimation: feasibility study and sensitivity analysis},
  journal = {International Journal of Legal Medicine},
  volume = {131},
  number = {3},
  doi = {10.1007/s00414-016-1523-0},
  pages = {699--712},
  year = {2017},
  abstract = {Temperature-based death time estimation is based either on simple phenomenological models of corpse cooling or on detailed physical heat transfer models. The latter are much more complex, but allow a higher accuracy of death time estimation, as in principle all relevant cooling mechanisms can be taken into account. Here, a complete workflow for finite element based cooling simulation models is presented. The following steps are demonstrated on CT phantoms: CT scan; segmentation of the CT images for thermodynamically relevant features of individual geometries; conversion of the segmentation result into a Finite Element (FE) simulation model; computation of the model cooling curve; calculation of the cooling time. For the first time in FE-based cooling time estimation, the steps from the CT image over segmentation to FE model generation are performed semi-automatically. The cooling time calculation results are compared to cooling measurements performed on the phantoms under controlled conditions. In this context, the method is validated using different CT phantoms. Some of the CT phantoms' thermodynamic material parameters had to be determined via independent experiments.
Moreover, the impact of geometry and material parameter uncertainties on the estimated cooling time is investigated by a sensitivity analysis.},
  language = {en}
}

@book{Weiser,
  author = {Weiser, Martin},
  title = {Inside Finite Elements},
  publisher = {De Gruyter},
  abstract = {All relevant implementation aspects of finite element methods are discussed in this book. The focus is on algorithms and data structures as well as on their concrete implementation. Theory is covered as far as it gives insight into the construction of algorithms. Throughout the exercises, a complete FE solver for scalar 2D problems will be implemented in Matlab/Octave.},
  language = {en}
}

@article{WeiserZachowDeuflhard2010,
  author = {Weiser, Martin and Zachow, Stefan and Deuflhard, Peter},
  title = {Craniofacial Surgery Planning Based on Virtual Patient Models},
  journal = {it - Information Technology},
  volume = {52},
  number = {5},
  publisher = {Oldenbourg Verlagsgruppe},
  doi = {10.1524/itit.2010.0600},
  pages = {258--263},
  year = {2010},
  language = {en}
}

@inproceedings{GoetschelTycowiczPolthieretal.,
  author = {G{\"o}tschel, Sebastian and Tycowicz, Christoph von and Polthier, Konrad and Weiser, Martin},
  title = {Reducing Memory Requirements in Scientific Computing and Optimal Control},
  booktitle = {Multiple Shooting and Time Domain Decomposition Methods},
  editor = {Carraro, T. and Geiger, M. and Koerkel, S. and Rannacher, R.},
  publisher = {Springer},
  pages = {263--287},
  language = {en}
}

@article{GoetschelNagaiahKunischetal.,
  author = {G{\"o}tschel, Sebastian and Nagaiah, Chamakuri and Kunisch, Karl and Weiser, Martin},
  title = {Lossy Compression in Optimal Control of Cardiac Defibrillation},
  journal = {J. Sci. Comput.},
  volume = {60},
  number = {1},
  doi = {10.1007/s10915-013-9785-x},
  pages = {35--59},
  abstract = {This paper presents efficient computational techniques for solving an optimization problem in cardiac defibrillation governed by the monodomain equations. Time-dependent electrical currents injected at different spatial positions act as the control. Inexact Newton-CG methods are used, with reduced gradient computation by adjoint solves. In order to reduce the computational complexity, adaptive mesh refinement for state and adjoint equations is performed. To reduce the high storage and bandwidth demand imposed by adjoint gradient and Hessian-vector evaluations, a lossy compression technique for storing trajectory data is applied. An adaptive choice of the quantization tolerance based on error estimates is developed in order to ensure convergence. The efficiency of the proposed approach is demonstrated on numerical examples.},
  language = {en}
}

@article{GoetschelWeisera,
  author = {G{\"o}tschel, Sebastian and Weiser, Martin},
  title = {Lossy Compression for PDE-constrained Optimization: Adaptive Error Control},
  journal = {Comput. Optim. Appl.},
  volume = {62},
  number = {1},
  publisher = {Springer},
  pages = {131--155},
  abstract = {For the solution of optimal control problems governed by nonlinear parabolic PDEs, methods working on the reduced objective functional are often employed to avoid a full spatio-temporal discretization of the problem. The evaluation of the reduced gradient requires one solve of the state equation forward in time and one backward solve of the adjoint equation.
The state enters into the adjoint equation, requiring the storage of a full 4D data set. If Newton-CG methods are used, two additional trajectories have to be stored. To obtain sufficiently accurate numerical results, very fine discretizations in time and space are necessary in many cases, which leads to a significant amount of data to be stored and transmitted to mass storage. Lossy compression methods were developed to overcome the storage problem by reducing the accuracy of the stored trajectories. The inexact data induces errors in the reduced gradient and reduced Hessian. In this paper, we analyze the influence of such a lossy trajectory compression method on Newton-CG methods for optimal control of parabolic PDEs and design an adaptive strategy for choosing appropriate quantization tolerances.},
  language = {en}
}

@inproceedings{WeiserScacchi,
  author = {Weiser, Martin and Scacchi, Simone},
  title = {Spectral Deferred Correction methods for adaptive electro-mechanical coupling in cardiac simulation},
  booktitle = {Progress in Industrial Mathematics at ECMI 2014},
  editor = {Russo, G. and others},
  publisher = {Springer},
  doi = {10.1007/978-3-319-23413-7_42},
  pages = {321--328},
  abstract = {We investigate spectral deferred correction (SDC) methods for time stepping and their interplay with spatio-temporal adaptivity, applied to the solution of the cardiac electro-mechanical coupling model. This model consists of the Monodomain equations, a reaction-diffusion system modeling the cardiac bioelectrical activity, coupled with a quasi-static mechanical model describing the contraction and relaxation of the cardiac muscle. The numerical approximation of the cardiac electro-mechanical coupling is a challenging multiphysics problem because it exhibits very different spatial and temporal scales. Therefore, spatio-temporal adaptivity is a promising approach to reduce the computational complexity. SDC methods are simple iterative methods for solving collocation systems. We exploit their flexibility to combine them in various ways with spatio-temporal adaptivity. The accuracy and computational complexity of the resulting methods are studied on some numerical examples.},
  language = {en}
}

@article{GoetschelWeiserMaierhoferetal.,
  author = {G{\"o}tschel, Sebastian and Weiser, Martin and Maierhofer, Christiane and Richter, Regina and R{\"o}llig, Mathias},
  title = {Fast Defect Shape Reconstruction Based on the Travel Time in Pulse Thermography},
  journal = {Nondestructive Testing of Materials and Structures},
  volume = {6},
  pages = {83--89},
  language = {en}
}

@inproceedings{KrauseWeiser,
  author = {Krause, Rolf and Weiser, Martin},
  title = {Multilevel augmented Lagrangian solvers for overconstrained contact formulations},
  booktitle = {ESAIM: ProcS},
  volume = {71},
  doi = {10.1051/proc/202171175},
  pages = {175--184},
  abstract = {Multigrid methods for two-body contact problems are mostly based on special mortar discretizations, nonlinear Gauss-Seidel solvers, and solution-adapted coarse grid spaces. Their high computational efficiency comes at the cost of a complex implementation and a nonsymmetric master-slave discretization of the nonpenetration condition.
Here we investigate an alternative symmetric and overconstrained segment-to-segment contact formulation that allows for a simple implementation based on standard multigrid and a symmetric treatment of contact boundaries, but leads to nonunique multipliers. For the solution of the arising quadratic programs, we propose augmented Lagrangian multigrid with overlapping block Gauss-Seidel smoothers. Approximation and convergence properties are studied numerically on standard test problems.},
  language = {en}
}

@book{DeuflhardWeiser,
  author = {Deuflhard, Peter and Weiser, Martin},
  title = {Numerische Mathematik 3. Adaptive L{\"o}sung partieller Differentialgleichungen},
  edition = {2},
  publisher = {de Gruyter},
  isbn = {978-3-11-069168-9},
  doi = {10.1515/9783110689655},
  pages = {456},
  language = {de}
}

@article{WeiserFreytagErdmannetal.,
  author = {Weiser, Martin and Freytag, Yvonne and Erdmann, Bodo and Hubig, Michael and Mall, Gita},
  title = {Optimal Design of Experiments for Estimating the Time of Death in Forensic Medicine},
  journal = {Inverse Problems},
  volume = {34},
  number = {12},
  doi = {10.1088/1361-6420/aae7a5},
  pages = {125005},
  abstract = {Estimation of the time of death based on a single measurement of body core temperature is a standard procedure in forensic medicine. Mechanistic models using simulation of heat transport promise higher accuracy than established phenomenological models, in particular in nonstandard situations, but involve many physical parameters that are not known exactly. Identifying both the time of death and the physical parameters from multiple temperature measurements is one possibility to reduce the uncertainty significantly. In this paper, we consider the inverse problem in a Bayesian setting and perform both local and sampling-based uncertainty quantification, where proper orthogonal decomposition is used as model reduction for fast solution of the forward model. Based on the local uncertainty quantification, optimal design of experiments is performed in order to minimize the uncertainty in the time of death estimate for a given number of measurements. For reasons of practicability, temperature acquisition points are selected from a set of candidates at different spatial and temporal locations. Applied to a real corpse model, a significant accuracy improvement is obtained already with a small number of measurements.},
  language = {en}
}

@article{GoetschelWeiserb,
  author = {G{\"o}tschel, Sebastian and Weiser, Martin},
  title = {Compression Challenges in Large Scale Partial Differential Equation Solvers},
  journal = {Algorithms},
  volume = {12},
  number = {9},
  doi = {10.3390/a12090197},
  pages = {197},
  abstract = {Solvers for partial differential equations (PDEs) are one of the cornerstones of computational science. For large problems, they involve huge amounts of data that need to be stored and transmitted on all levels of the memory hierarchy. Often, bandwidth is the limiting factor due to the relatively small arithmetic intensity, and increasingly due to the growing disparity between computing power and bandwidth. Consequently, data compression techniques have been investigated and tailored towards the specific requirements of PDE solvers over the recent decades. This paper surveys data compression challenges and discusses examples of corresponding solution approaches for PDE problems, covering all levels of the memory hierarchy from mass storage up to the main memory.
We illustrate concepts for particular methods, with examples, and give references to alternatives.},
  language = {en}
}

@article{GoetschelSchielaWeiser,
  author = {G{\"o}tschel, Sebastian and Schiela, Anton and Weiser, Martin},
  title = {Kaskade 7 - a Flexible Finite Element Toolbox},
  journal = {Computers and Mathematics with Applications},
  volume = {81},
  doi = {10.1016/j.camwa.2020.02.011},
  pages = {444--458},
  abstract = {Kaskade 7 is a finite element toolbox for the solution of stationary or transient systems of partial differential equations, aimed at supporting application-oriented research in numerical analysis and scientific computing. The library is written in C++ and is based on the \textsc{Dune} interface. The code is independent of spatial dimension and works with different grid managers. An important feature is the mix-and-match approach to discretizing systems of PDEs with different ansatz and test spaces for all variables. We describe the mathematical concepts behind the library as well as its structure, illustrating its use with several examples along the way.},
  language = {en}
}

@inproceedings{AlhaddadFoerstnerGrothetal.,
  author = {Alhaddad, Samer and F{\"o}rstner, Jens and Groth, Stefan and Gr{\"u}newald, Daniel and Grynko, Yevgen and Hannig, Frank and Kenter, Tobias and Pfreundt, Franz-Josef and Plessl, Christian and Schotte, Merlind and Steinke, Thomas and Teich, J{\"u}rgen and Weiser, Martin and Wende, Florian},
  title = {HighPerMeshes - A Domain-Specific Language for Numerical Algorithms on Unstructured Grids},
  booktitle = {Euro-Par 2020: Parallel Processing Workshops},
  publisher = {Springer},
  doi = {10.1007/978-3-030-71593-9_15},
  pages = {185--196},
  abstract = {Solving partial differential equations on unstructured grids is a cornerstone of engineering and scientific computing. Nowadays, heterogeneous parallel platforms with CPUs, GPUs, and FPGAs enable energy-efficient and computationally demanding simulations. We developed the HighPerMeshes C++-embedded Domain-Specific Language (DSL) for bridging the abstraction gap between the mathematical and algorithmic formulation of mesh-based algorithms for PDE problems on the one hand and an increasing number of heterogeneous platforms with their different parallel programming and runtime models on the other hand. Thus, the HighPerMeshes DSL aims at higher productivity in the code development process for multiple target platforms. We introduce the concepts as well as the basic structure of the HighPerMeshes DSL and demonstrate its usage with three examples: a Poisson problem and a monodomain problem, both solved by the continuous finite element method, and Maxwell's equations, solved by the discontinuous Galerkin method. The mapping of the abstract algorithmic description onto parallel hardware, including distributed-memory compute clusters, is presented. Finally, the achievable performance and scalability are demonstrated for a typical example problem on a multi-core CPU cluster.},
  language = {en}
}

@article{CheginiKopanicakovaKrauseetal.,
  author = {Chegini, Fatemeh and Kopanicakova, Alena and Krause, Rolf and Weiser, Martin},
  title = {Efficient Identification of Scars using Heterogeneous Model Hierarchies},
  journal = {EP Europace},
  volume = {23},
  doi = {10.1093/europace/euaa402},
  pages = {i113--i122},
  abstract = {Aims.
Detection and quantification of myocardial scars are helpful both for diagnosis of heart diseases and for building personalized simulation models. Scar tissue is generally characterized by a different conduction of electrical excitation. We aim at estimating conductivity-related parameters from endocardial mapping data, in particular the conductivity tensor. Solving this inverse problem requires computationally expensive monodomain simulations on fine discretizations. Therefore, we aim at accelerating the estimation using a multilevel method combining electrophysiology models of different complexity, namely the monodomain and the eikonal model. Methods. Distributed parameter estimation is performed by minimizing the misfit between simulated and measured electrical activity on the endocardial surface, subject to the monodomain model and regularization, leading to a constrained optimization problem. We formulate this optimization problem, including the modeling of scar tissue and different regularizations, and design an efficient iterative solver. We consider monodomain grid hierarchies and monodomain-eikonal model hierarchies in a recursive multilevel trust-region method. Results. From several numerical examples, both the efficiency of the method and the estimation quality, depending on the data, are investigated. The multilevel solver is significantly faster than a comparable single-level solver. Endocardial mapping data of realistic density appears to be just sufficient to provide quantitatively reasonable estimates of location, size, and shape of scars close to the endocardial surface. Conclusion. In several situations, scar reconstructions based on eikonal and monodomain models differ significantly, suggesting the use of the more accurate but more expensive monodomain model for this purpose. Still, eikonal models can be utilized to accelerate the computations considerably, enabling the use of complex electrophysiology models for estimating myocardial scars from endocardial mapping data.},
  language = {en}
}