@misc{BorndoerferDaneckerWeiserNewton, author = {Bornd{\"o}rfer, Ralf and Danecker, Fabian and Weiser, Martin}, title = {Convergence Properties of Newton's Method for Globally Optimal Free Flight Trajectory Optimization}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-91309}, abstract = {The algorithmic efficiency of Newton-based methods for Free Flight Trajectory Optimization is heavily influenced by the size of the domain of convergence. We provide numerical evidence that the convergence radius is much larger in practice than the theoretical worst-case bounds suggest. The algorithm can be further improved by a convergence-enhancing domain decomposition.}, language = {en} } @misc{BorndoerferDaneckerWeiserDisCOptER, author = {Bornd{\"o}rfer, Ralf and Danecker, Fabian and Weiser, Martin}, title = {A Discrete-Continuous Algorithm for Free Flight Planning}, issn = {1438-0064}, doi = {10.3390/a14010004}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-81343}, abstract = {We propose a hybrid discrete-continuous algorithm for flight planning in free flight airspaces. In a first step, our DisCOptER method (discrete-continuous optimization for enhanced resolution) computes a globally optimal approximate flight path on a discretization of the problem using the A* method. This route initializes a Newton method that converges rapidly to the smooth optimum in a second step. The correctness, accuracy, and complexity of the method are governed by the choice of the crossover point that determines the coarseness of the discretization. We analyze the optimal choice of the crossover point and demonstrate the asymptotic superiority of DisCOptER over a purely discrete approach.}, language = {en} } @misc{GoetschelWeiser, author = {G{\"o}tschel, Sebastian and Weiser, Martin}, title = {Lossy Compression for Large Scale PDE Problems}, issn = {1438-0064}, doi = {10.1101/506378}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-73817}, abstract = {Solvers for partial differential equations (PDE) are one of the cornerstones of computational science. For large problems, they involve huge amounts of data that need to be stored and transmitted on all levels of the memory hierarchy. Often, bandwidth is the limiting factor due to the relatively small arithmetic intensity, and increasingly so due to the growing disparity between computing power and bandwidth. Consequently, data compression techniques have been investigated and tailored towards the specific requirements of PDE solvers during the last decades. This paper surveys data compression challenges and corresponding solution approaches for PDE problems, covering all levels of the memory hierarchy from mass storage up to main memory. We illustrate the concepts with particular methods and give references to alternatives.}, language = {en} }
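Editorial note on the preceding entry: the survey covers a broad family of compression techniques; the following minimal Python sketch shows one of the simplest building blocks, error-bounded uniform quantization of floating-point solution data. It is illustrative only; the function names and tolerance are made up and not taken from the paper.

    import numpy as np

    def quantize(u, tol):
        # Round to multiples of 2*tol; the resulting small integers are
        # what an entropy coder would compress in a full implementation.
        return np.round(u / (2.0 * tol)).astype(np.int64)

    def dequantize(q, tol):
        return q * (2.0 * tol)

    u = np.sin(np.linspace(0.0, np.pi, 1000))   # stand-in for PDE solution data
    q = quantize(u, tol=1e-4)
    assert np.max(np.abs(u - dequantize(q, 1e-4))) <= 1e-4   # pointwise error bound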
@misc{GoetschelSchielaWeiser, author = {G{\"o}tschel, Sebastian and Schiela, Anton and Weiser, Martin}, title = {Kaskade 7 -- a Flexible Finite Element Toolbox}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-74616}, abstract = {Kaskade 7 is a finite element toolbox for the solution of stationary or transient systems of partial differential equations, aimed at supporting application-oriented research in numerical analysis and scientific computing. The library is written in C++ and is based on the Dune interface. The code is independent of spatial dimension and works with different grid managers. An important feature is the mix-and-match approach to discretizing systems of PDEs with different ansatz and test spaces for all variables. We describe the mathematical concepts behind the library as well as its structure, illustrating its use with several examples along the way.}, language = {en} } @misc{WeiserFreytagErdmannetal., author = {Weiser, Martin and Freytag, Yvonne and Erdmann, Bodo and Hubig, Michael and Mall, Gita}, title = {Optimal Design of Experiments for Estimating the Time of Death in Forensic Medicine}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-67247}, abstract = {Estimation of the time of death based on a single measurement of body core temperature is a standard procedure in forensic medicine. Mechanistic models using simulation of heat transport promise higher accuracy than established phenomenological models, in particular in nonstandard situations, but involve many physical parameters that are not known exactly. Identifying both the time of death and the physical parameters from multiple temperature measurements is one way to reduce the uncertainty significantly. In this paper, we consider the inverse problem in a Bayesian setting and perform both local and sampling-based uncertainty quantification, where proper orthogonal decomposition is used as model reduction for fast solution of the forward model. Based on the local uncertainty quantification, optimal design of experiments is performed in order to minimize the uncertainty in the time of death estimate for a given number of measurements. For reasons of practicability, temperature acquisition points are selected from a set of candidates at different spatial and temporal locations. Applied to a real corpse model, the approach achieves a significant accuracy improvement with only a small number of measurements.}, language = {en} } @misc{FischerGoetschelWeiser, author = {Fischer, Lisa and G{\"o}tschel, Sebastian and Weiser, Martin}, title = {Lossy data compression reduces communication time in hybrid time-parallel integrators}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-63961}, abstract = {Parallel-in-time methods for solving initial value problems are a means to increase the parallelism of numerical simulations. Hybrid parareal schemes, which interleave the parallel-in-time iteration with an iterative solution of the individual time steps, are among the most efficient methods for general nonlinear problems. Although communication time can be hidden behind computation, communication has a significant impact on the total runtime in certain situations. Here we present strict, yet not sharp, error bounds for hybrid parareal methods with inexact communication due to lossy data compression, and derive theoretical estimates of the impact of compression on the parallel efficiency of the algorithms. These estimates, together with computational experiments, suggest that compression is a viable approach to make hybrid parareal schemes robust with respect to low-bandwidth setups.}, language = {en} }
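Editorial note on the preceding entry: a minimal serial emulation of the parareal iteration, with a hook where lossily compressed states would be communicated between time slices. The propagators G and F and the rounding-based "compression" are hypothetical stand-ins, not the paper's scheme.

    import numpy as np

    def parareal(u0, G, F, n_slices, n_iter, compress=lambda u: u):
        # Initial coarse sweep over all time slices.
        U = [u0]
        for n in range(n_slices):
            U.append(G(U[n]))
        for _ in range(n_iter):
            # The fine solves are independent -> parallel in time; their
            # results are what gets communicated, here through 'compress'.
            F_vals = [compress(F(U[n])) for n in range(n_slices)]
            G_old = [G(U[n]) for n in range(n_slices)]
            for n in range(n_slices):   # sequential coarse correction
                U[n + 1] = G(U[n]) + F_vals[n] - G_old[n]
        return U

    lam, dt = -1.0, 0.1
    G = lambda u: u + dt * lam * u        # one explicit Euler step (coarse)
    F = lambda u: u * np.exp(lam * dt)    # exact propagator, standing in for 'fine'
    lossy = lambda u: np.round(u, 4)      # toy stand-in for lossy compression
    U = parareal(1.0, G, F, n_slices=10, n_iter=3, compress=lossy)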
@misc{WeiserErdmannSchenkletal.2017, author = {Weiser, Martin and Erdmann, Bodo and Schenkl, Sebastian and Muggenthaler, Holger and Hubig, Michael and Mall, Gita and Zachow, Stefan}, title = {Uncertainty in Temperature-Based Determination of Time of Death}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-63818}, year = {2017}, abstract = {Temperature-based estimation of the time of death (ToD) can be performed either with the help of simple phenomenological models of corpse cooling or with detailed mechanistic (thermodynamic) heat transfer models. The latter are much more complex, but allow a higher accuracy of ToD estimation, as in principle all relevant cooling mechanisms can be taken into account. The potentially higher accuracy depends on the accuracy of tissue and environmental parameters as well as on the geometric resolution. We investigate the impact of parameter variations and geometry representation on the estimated ToD based on a highly detailed 3D corpse model that has been segmented and geometrically reconstructed from a computed tomography (CT) data set, differentiating various organs and tissue types. From that we identify the most crucial parameters to measure or estimate, and obtain a local uncertainty quantification for the ToD.}, language = {en} } @misc{SagnolHegeWeiser, author = {Sagnol, Guillaume and Hege, Hans-Christian and Weiser, Martin}, title = {Using sparse kernels to design computer experiments with tunable precision}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-59605}, abstract = {Statistical methods to design computer experiments usually rely on a Gaussian process (GP) surrogate model, and typically aim at selecting design points (combinations of algorithmic and model parameters) that minimize the average prediction variance or maximize the prediction accuracy for the hyperparameters of the GP surrogate. In many applications, experiments have a tunable precision, in the sense that one software parameter controls the tradeoff between accuracy and computing time (e.g., mesh size in FEM simulations or the number of Monte Carlo samples). We formulate the problem of allocating a budget of computing time over a finite set of candidate points for the goals mentioned above. This is a continuous optimization problem, which is moreover convex whenever the accuracy vs. computing time tradeoff function is concave. On the other hand, using non-concave weight functions can help to identify sparse designs. In addition, using sparse kernel approximations drastically reduces the cost per iteration of the multiplicative weights updates that can be used to solve this problem.}, language = {en} }
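Editorial note on the preceding entry: the budget allocation it describes can be pictured with a small exponentiated-gradient (multiplicative weights) iteration on the simplex of budget fractions. The concave objective below is a made-up proxy, not the paper's prediction-variance criterion.

    import numpy as np

    rng = np.random.default_rng(0)
    n = 20                                  # candidate design points
    gain = rng.uniform(0.5, 2.0, n)         # hypothetical per-point information rates
    w = np.ones(n) / n                      # budget fractions, summing to 1

    def grad(w):
        # Gradient of the concave toy objective sum_i log(1 + gain_i * w_i).
        return gain / (1.0 + gain * w)

    eta = 0.1
    for _ in range(500):
        w *= np.exp(eta * grad(w))          # multiplicative weights update
        w /= w.sum()                        # renormalize onto the budget simplex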
@misc{GoetschelMaierhoferMuelleretal., author = {G{\"o}tschel, Sebastian and Maierhofer, Christiane and M{\"u}ller, Jan P. and Rothbart, Nick and Weiser, Martin}, title = {Quantitative Defect Reconstruction in Active Thermography for Fiber-Reinforced Composites}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-58374}, abstract = {Carbon-fiber reinforced composites are becoming increasingly important in the production of light-weight structures, e.g., in the automotive and aerospace industry. Thermography is often used for non-destructive testing of these products, especially to detect delaminations between different layers of the composite. In this presentation, we aim at methods for defect reconstruction from thermographic measurements of such carbon-fiber reinforced composites. The reconstruction results shall not only locate defects, but also give a quantitative characterization of the defect properties. We discuss the simulation of the measurement process using finite element methods, as well as the experimental validation on flat bottom holes. Especially in pulse thermography, thin boundary layers with steep temperature gradients occurring at the heated surface need to be resolved. Here we combine a 1D analytical solution with a numerical solution of the remaining defect equation. We use the simulations to identify material parameters from the measurements. Finally, fast heuristics for reconstructing defect geometries are applied to the acquired data and compared for their accuracy and utility in detecting different defects like back surface defects or delaminations.}, language = {en} } @misc{GoetschelHoehneKolkoorietal., author = {G{\"o}tschel, Sebastian and H{\"o}hne, Christian and Kolkoori, Sanjeevareddy and Mitzscherling, Steffen and Prager, Jens and Weiser, Martin}, title = {Ray Tracing Boundary Value Problems: Simulation and SAFT Reconstruction for Ultrasonic Testing}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-58386}, abstract = {The application of advanced imaging techniques for the ultrasonic inspection of inhomogeneous anisotropic materials like austenitic and dissimilar welds requires information about acoustic wave propagation through the material, in particular travel times between two points in the material. Forward ray tracing is a popular approach to determine traveling paths and arrival times, but it is ill-suited for inverse problems, since a large number of rays have to be computed in order to arrive at prescribed end points. In this contribution we discuss boundary value problems for acoustic rays, where the ray path between two given points is determined by solving the eikonal equation. The implementation of such a two-point boundary value ray tracer for sound field simulations through an austenitic weld is described, and its efficiency as well as the obtained results are compared to those of a forward ray tracer. The results are validated by comparison with experimental results and commercially available UT simulation tools. As an application, we discuss an implementation of the method for SAFT (Synthetic Aperture Focusing Technique) reconstruction. The ray tracer calculates the required travel time through the anisotropic columnar grain structure of the austenitic weld. There, the formulation of ray tracing as a boundary value problem allows a straightforward derivation of the ray path from a given transducer position to any pixel in the reconstruction area and reduces the computational cost considerably.}, language = {en} }
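Editorial note on the preceding entry: a ray between two fixed points can also be found variationally, by minimizing travel time over a polygonal path (Fermat's principle), which makes the two-point idea concrete. The sketch below uses a made-up isotropic slowness field and generic optimization; the paper instead solves the eikonal equation for anisotropic welds.

    import numpy as np
    from scipy.optimize import minimize

    def slowness(p):
        # Made-up isotropic slowness (1/velocity) field with a slow inclusion.
        return 1.0 + 0.5 * np.exp(-((p[0] - 0.5)**2 + (p[1] - 0.5)**2) / 0.02)

    A, B = np.array([0.0, 0.0]), np.array([1.0, 1.0])   # fixed end points
    n_free = 18                                          # movable interior nodes

    def travel_time(flat):
        pts = np.vstack([A, flat.reshape(n_free, 2), B])
        seg = np.linalg.norm(np.diff(pts, axis=0), axis=1)   # segment lengths
        mids = 0.5 * (pts[1:] + pts[:-1])                    # segment midpoints
        return float(np.sum(seg * np.apply_along_axis(slowness, 1, mids)))

    x0 = np.linspace(A, B, n_free + 2)[1:-1].ravel()     # straight-line initial guess
    ray = minimize(travel_time, x0, method="BFGS")       # ray.x holds the bent path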