@misc{StallingHege, author = {Stalling, Detlev and Hege, Hans-Christian}, title = {Fast and Resolution Independent Line Integral Convolution}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-1653}, number = {SC-94-37}, abstract = {Line Integral Convolution (LIC) is a powerful technique for generating striking images and animations from vector data. Introduced in 1993, the method has rapidly found many application areas, ranging from computer arts to scientific visualization. Based upon locally filtering an input texture along a curved stream line segment in a vector field, it is able to depict directional information at high spatial resolutions. We present a new method for computing LIC images, which minimizes the total number of stream lines to be computed and thereby reduces computational costs by an order of magnitude compared to the original algorithm. Our method utilizes fast, error-controlled numerical integrators. By decoupling the characteristic lengths in the vector field grid, the input texture, and the output image, it allows filtered images to be computed at arbitrary resolution. This feature is of great significance in computer animation as well as in scientific visualization, where it can be used to explore vector data by smoothly enlarging structures of detail. We also present methods for improved texture animation, employing constant filter kernels only. To obtain an optimal motion effect, the spatial decay of correlation between intensities of distant pixels in the output image has to be controlled. This is achieved by blending different phase-shifted box filter animations and by adaptively rescaling the contrast of the output frames.}, language = {en} } @misc{GrammelHegeWunderling, author = {Grammel, Martin and Hege, Hans-Christian and Wunderling, Roland}, title = {On the Impact of Communication Latencies on Distributed Sparse LU Factorization}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-1245}, number = {SC-93-28}, abstract = {Sparse LU factorization offers some potential for parallelism, but at a level of very fine granularity. However, most current distributed memory MIMD architectures have communication latencies too high to exploit all available parallelism. To cope with this, latencies must be avoided by coarsening the granularity and by message fusion. However, both techniques limit the concurrency, thereby reducing the scalability. In this paper, an implementation of a parallel LU decomposition algorithm for linear programming bases is presented for distributed memory parallel computers with noticeable communication latencies. Several design decisions due to latencies, including data distribution and load balancing techniques, are discussed. An approximate performance model is set up for the algorithm, which allows the impact of latencies on its performance to be quantified. Finally, experimental results for an Intel iPSC/860 parallel computer are reported and discussed.}, language = {en} } @misc{BeckDeuflhardHegeetal., author = {Beck, Rudolf and Deuflhard, Peter and Hege, Hans-Christian and Seebass, Martin and Stalling, Detlev}, title = {Numerical Algorithms and Visualization in Medical Treatment Planning}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-2643}, number = {SC-96-54}, abstract = {After a short summary of therapy planning and the underlying technologies, we discuss quantitative medicine by giving a short overview of medical image data, summarizing some applications of computer-based treatment planning, and outlining requirements on medical planning systems.
Then we continue with a description of our medical planning system {\sf HyperPlan}. It supports typical working steps in therapy planning, such as data acquisition, segmentation, grid generation, numerical simulation and optimization, accompanying these with powerful visualization and interaction techniques.}, language = {en} } @misc{StallingZoecklerHege, author = {Stalling, Detlev and Z{\"o}ckler, Malte and Hege, Hans-Christian}, title = {Fast Display of Illuminated Field Lines}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-2686}, number = {SC-96-58}, abstract = {A new technique for interactive vector field visualization using large numbers of properly illuminated field lines is presented. Taking into account ambient, diffuse, and specular reflection terms as well as transparency and depth cueing, we employ a realistic shading model which significantly increases the quality and realism of the resulting images. While many graphics workstations offer hardware support for illuminating surface primitives, usually no means for an accurate shading of line primitives are provided. However, we show that proper illumination of lines can be implemented by exploiting the texture mapping capabilities of modern graphics hardware. In this way, high rendering performance with interactive frame rates can be achieved. We apply the technique to render large numbers of integral curves of a vector field. The impression of the resulting images can be further improved by a number of visual enhancements, like transparency and depth cueing. We also describe methods for controlling the distribution of field lines in space. These methods enable us to use illuminated field lines for interactive exploration of vector fields.}, language = {en} } @misc{BattkeStallingHege, author = {Battke, Henrik and Stalling, Detlev and Hege, Hans-Christian}, title = {Fast Line Integral Convolution for Arbitrary Surfaces in 3D}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-2690}, number = {SC-96-59}, abstract = {We describe an extension of the line integral convolution method (LIC) for imaging of vector fields on arbitrary surfaces in 3D space. Previous approaches were limited to curvilinear surfaces, i.e.~surfaces which can be parametrized globally using 2D coordinates. By contrast, our method also handles the case of general, possibly multiply connected surfaces. The method works by tessellating a given surface with triangles. For each triangle, local Euclidean coordinates are defined and a local LIC texture is computed. No scaling or distortion is involved when mapping the texture onto the surface. The characteristic length of the texture remains constant. In order to exploit the texture hardware of modern graphics computers, we have developed a tiling strategy for arranging a large number of triangular texture pieces within a single rectangular texture image.
In this way, texture memory is utilized optimally and even large textured surfaces can be explored interactively.}, language = {en} } @misc{DeuflhardSeebassStallingetal., author = {Deuflhard, Peter and Seebass, Martin and Stalling, Detlev and Beck, Rudolf and Hege, Hans-Christian}, title = {Hyperthermia Treatment Planning in Clinical Cancer Therapy: Modelling, Simulation and Visualization}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-2958}, number = {SC-97-26}, abstract = {\noindent The speaker and his co-workers in Scientific Computing and Visualization have established a close cooperation with medical doctors at the Rudolf--Virchow--Klinikum of the Humboldt University in Berlin on the topic of regional hyperthermia. In order to permit a patient--specific treatment planning, a special software system ({\sf\small HyperPlan}) has been developed. \noindent A mathematical model of the clinical system ({\it radio frequency applicator with 8 antennas, water bolus, individual patient body}) involves Maxwell's equations in inhomogeneous media and a so--called bio--heat transfer PDE describing the temperature distribution in the human body. The electromagnetic field and the thermal phenomena need to be computed at a speed suitable for the clinical environment. An individual geometric patient model is generated as a quite complicated tetrahedral ``coarse'' grid (several thousand nodes). Both Maxwell's equations and the bio--heat transfer equation are solved on that 3D--grid by means of {\em adaptive} multilevel finite element methods, which automatically refine the grid where necessary in view of the required accuracy. Finally, optimal antenna parameters for the applicator are determined. \noindent All steps of the planning process are supported by powerful visualization methods. Medical images, contours, grids, simulated electromagnetic fields and temperature distributions can be displayed in combination. A number of new algorithms and techniques had to be developed and implemented. Special emphasis has been put on advanced 3D interaction methods and user interface issues.}, language = {en} } @misc{HegeStalling, author = {Hege, Hans-Christian and Stalling, Detlev}, title = {Fast LIC with Higher Order Filter Kernels}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-3439}, number = {SC-97-74}, abstract = {Line integral convolution (LIC) has become a well-known and popular method for visualizing vector fields. The method works by convolving a random input texture along the integral curves of the vector field. In order to accelerate image synthesis significantly, an efficient algorithm has been proposed that utilizes pixel coherence in field line direction. This algorithm, called ``fast LIC'', was originally restricted to simple box-type filter kernels. Here we describe a generalization of fast LIC for piecewise polynomial filter kernels. Expanding the filter kernels in terms of truncated power functions allows us to exploit a certain convolution theorem. The convolution integral is expressed as a linear combination of repeated integrals (or repeated sums in the discrete case). Compared to the original algorithm, the additional expense for using higher order filter kernels, e.g.\ of B-spline type, is very low. Such filter kernels produce smoother, less noisy results than a box filter. This is evident from visual inspection, as well as from an analysis of pixel correlations.
Thus, our method represents a useful extension of the fast LIC algorithm for the creation of high-quality LIC images.}, language = {en} } @misc{ZachowZilskeHege, author = {Zachow, Stefan and Zilske, Michael and Hege, Hans-Christian}, title = {3D reconstruction of individual anatomy from medical image data: Segmentation and geometry processing}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-10440}, number = {07-41}, abstract = {For medical diagnosis, visualization, and model-based therapy planning, three-dimensional geometric reconstructions of individual anatomical structures are often indispensable. Computer-assisted, model-based planning procedures typically cover specific modifications of "virtual anatomy" as well as numerical simulations of associated phenomena, such as mechanical loads, fluid dynamics, or diffusion processes, in order to evaluate a potential therapeutic outcome. Since internal anatomical structures cannot be measured optically or mechanically in vivo, three-dimensional reconstruction from tomographic image data remains the method of choice. In this work, the process chain of individual anatomy reconstruction is described, ranging from the segmentation of medical image data and the geometric reconstruction of all relevant tissue interfaces to the generation of geometric approximations (boundary surfaces and volumetric meshes) of three-dimensional anatomy suited for finite element analysis. All results presented herein are generated with amira®, a highly interactive software system for 3D data analysis, visualization and geometry reconstruction.}, language = {en} } @misc{LindowBaumHege, author = {Lindow, Norbert and Baum, Daniel and Hege, Hans-Christian}, title = {Ligand Excluded Surface: A New Type of Molecular Surface}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-51194}, abstract = {The most popular molecular surface in molecular visualization is the solvent excluded surface (SES). It provides information about the accessibility of a biomolecule for a solvent molecule that is geometrically approximated by a sphere. During a period of almost four decades, the SES has served many purposes, including visualization, analysis of molecular interactions and the study of cavities in molecular structures. However, if one is interested in the surface that is accessible to a molecule whose shape differs significantly from a sphere, a different concept is necessary. To address this problem, we generalize the definition of the SES by replacing the probe sphere with the full geometry of the ligand defined by the arrangement of its van der Waals spheres. We call the new surface ligand excluded surface (LES) and present an efficient, grid-based algorithm for its computation. Furthermore, we show that this algorithm can also be used to compute molecular cavities that could host the ligand molecule. We provide a detailed description of its implementation on CPU and GPU.
Furthermore, we present a performance and convergence analysis and compare the LES for several molecules, using as ligands either water or small organic molecules.}, language = {en} } @misc{KozlikovaKroneFalketal., author = {Kozlikova, Barbora and Krone, Michael and Falk, Martin and Lindow, Norbert and Baaden, Marc and Baum, Daniel and Viola, Ivan and Parulek, Julius and Hege, Hans-Christian}, title = {Visualization of Biomolecular Structures: State of the Art}, issn = {1438-0064}, doi = {10.2312/eurovisstar.20151112}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-57217}, abstract = {Structural properties of molecules are of primary concern in many fields. This report provides a comprehensive overview of techniques that have been developed in the fields of molecular graphics and visualization with a focus on applications in structural biology. The field heavily relies on computerized geometric and visual representations of three-dimensional, complex, large, and time-varying molecular structures. The report presents a taxonomy that demonstrates which areas of molecular visualization have already been extensively investigated and where the field is currently heading. It discusses visualizations for molecular structures, strategies for efficient display regarding image quality and frame rate, covers different aspects of level of detail, and reviews visualizations illustrating the dynamic aspects of molecular simulation data. The survey concludes with an outlook on promising and important research topics to foster further success in the development of tools that help to reveal molecular secrets.}, language = {en} } @misc{KroneKozlikovaLindowetal., author = {Krone, Michael and Kozlikova, Barbora and Lindow, Norbert and Baaden, Marc and Baum, Daniel and Parulek, Julius and Hege, Hans-Christian and Viola, Ivan}, title = {Visual Analysis of Biomolecular Cavities: State of the Art}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-60193}, abstract = {In this report, we review and structure the branch of molecular visualization that is concerned with the visual analysis of cavities in macromolecular protein structures. First, the necessary background, the domain terminology, and the goals of analytical reasoning are introduced. Based on a comprehensive collection of relevant research works, we present a novel classification for cavity detection approaches and structure them into four distinct classes: grid-based, Voronoi-based, surface-based, and probe-based methods. The subclasses are then formed by their combinations. We match these approaches with corresponding visualization technologies, starting with direct 3D visualization and continuing with non-spatial visualization techniques that, for example, abstract the interactions between structures into a relational graph, straighten the cavity of interest to see its profile in one view, or aggregate the time sequence into a single contour plot. We also discuss the current state of methods for the visual analysis of cavities in dynamic data such as molecular dynamics simulations. Finally, we give an overview of the most common tools that are actively developed and used in structural biology and biochemistry research.
Our report is concluded by an outlook on future challenges in the field.}, language = {en} } @misc{HombergBaumWiebeletal., author = {Homberg, Ulrike and Baum, Daniel and Wiebel, Alexander and Prohaska, Steffen and Hege, Hans-Christian}, title = {Definition, Extraction, and Validation of Pore Structures in Porous Materials}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-42510}, abstract = {An intuitive and sparse representation of the void space of porous materials supports the efficient analysis and visualization of interesting qualitative and quantitative parameters of such materials. We introduce definitions of the elements of this void space, here called pore space, based on its distance function, and present methods to extract these elements using the extremal structures of the distance function. The presented methods are implemented by an image processing pipeline that determines pore centers, pore paths and pore constrictions. These pore space elements build a graph that represents the topology of the pore space in a compact way. The representations we derive from μCT image data of realistic soil specimens enable the computation of many statistical parameters and, thus, provide a basis for further visual analysis and application-specific developments. We introduced parts of our pipeline in previous work. In this chapter, we present additional details and compare our results with the analytic computation of the pore space elements for a sphere packing in order to show the correctness of our graph computation.}, language = {en} } @misc{KramerNoackBaumetal.2017, author = {Kramer, Tobias and Noack, Matthias and Baum, Daniel and Hege, Hans-Christian and Heller, Eric J.}, title = {Dust and gas emission from cometary nuclei: the case of comet 67P/Churyumov-Gerasimenko}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-66338}, year = {2017}, abstract = {With decreasing solar distance, comets display an increased emission of gas and dust particles, leading to the formation of the coma and tail. Spacecraft missions provide insight into the temporal and spatial variations of the dust and gas sources located on the cometary nucleus. For the case of comet 67P/Churyumov-Gerasimenko (67P/C-G), the long-term observations from the Rosetta mission point to a homogeneous dust emission across the entire illuminated surface. Despite the homogeneous initial distribution, a collimation in jet-like structures becomes visible. We propose that this observation is linked directly to the complex shape of the nucleus and projects concave topographical features into the dust coma. To test this hypothesis, we put forward a gas-dust description of 67P/C-G, where gravitational and gas forces are accurately determined from the surface mesh and the rotation of the nucleus is fully incorporated.
The emerging jet-like structures persist for a wide range of gas-dust interactions and show a dust velocity dependent bending.}, language = {en} } @misc{SagnolHegeWeiser, author = {Sagnol, Guillaume and Hege, Hans-Christian and Weiser, Martin}, title = {Using sparse kernels to design computer experiments with tunable precision}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-59605}, abstract = {Statistical methods to design computer experiments usually rely on a Gaussian process (GP) surrogate model, and typically aim at selecting design points (combinations of algorithmic and model parameters) that minimize the average prediction variance, or maximize the prediction accuracy for the hyperparameters of the GP surrogate. In many applications, experiments have a tunable precision, in the sense that one software parameter controls the tradeoff between accuracy and computing time (e.g., mesh size in FEM simulations or number of Monte-Carlo samples). We formulate the problem of allocating a budget of computing time over a finite set of candidate points for the goals mentioned above. This is a continuous optimization problem, which is moreover convex whenever the tradeoff function between accuracy and computing time is concave. On the other hand, using non-concave weight functions can help to identify sparse designs. In addition, using sparse kernel approximations drastically reduces the cost per iteration of the multiplicative weights updates that can be used to solve this problem.}, language = {en} } @misc{KastenReininghausHotzetal., author = {Kasten, Jens and Reininghaus, Jan and Hotz, Ingrid and Hege, Hans-Christian and Noack, Bernd and Daviller, Guillaume and Morzyński, Marek}, title = {Acceleration feature points of unsteady shear flows}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-58397}, abstract = {A framework is proposed for extracting features in 2D transient flows, based on the acceleration field to ensure Galilean invariance. The minima of the acceleration magnitude, i.e. a superset of the acceleration zeros, are extracted and discriminated into vortices and saddle points --- based on the spectral properties of the velocity Jacobian. The extraction of topological features is performed with purely combinatorial algorithms from discrete computational topology. The feature points are prioritized with persistence, as a physically meaningful importance measure. These features are tracked in time with a robust tracking algorithm. Thus, a space-time hierarchy of the minima is built and vortex merging events are detected. The acceleration feature extraction strategy is applied to three two-dimensional shear flows: (1) an incompressible periodic cylinder wake, (2) an incompressible planar mixing layer and (3) a weakly compressible planar jet. The vortex-like acceleration feature points are shown to be well aligned with acceleration zeros, maxima of the vorticity magnitude, minima of the pressure field and minima of λ2.}, language = {en} } @misc{NavaYazdaniHegevonTycowicz, author = {Nava-Yazdani, Esfandiar and Hege, Hans-Christian and von Tycowicz, Christoph}, title = {A Geodesic Mixed Effects Model in Kendall's Shape Space}, series = {Proc. 7th MICCAI workshop on Mathematical Foundations of Computational Anatomy (MFCA)}, journal = {Proc.
7th MICCAI workshop on Mathematical Foundations of Computational Anatomy (MFCA)}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-74621}, abstract = {In many applications, geodesic hierarchical models are adequate for the study of temporal observations. We apply such a model, derived for manifold-valued data, to Kendall's shape space. In particular, instead of the Sasaki metric, we adapt a functional-based metric, which increases the computational efficiency and does not require the implementation of the curvature tensor. We propose the corresponding variational time discretization of geodesics and apply the approach to the estimation of group trends and statistical testing of 3D shapes derived from an open access longitudinal imaging study on osteoarthritis.}, language = {en} } @misc{SakuraiHegeKuhnetal., author = {Sakurai, Daisuke and Hege, Hans-Christian and Kuhn, Alexander and Rust, Henning and Kern, Bastian and Breitkopf, Tom-Lukas}, title = {An Application-Oriented Framework for Feature Tracking in Atmospheric Sciences}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-72617}, abstract = {In atmospheric sciences, the sizes of data sets grow continuously due to increasing resolutions. A central task is the comparison of spatiotemporal fields, to assess different simulations and to compare simulations with observations. A significant information reduction is possible by focusing on geometric-topological features of the fields or on derived meteorological objects. Due to the huge size of the data sets, spatial features have to be extracted in time slices and traced over time. Fields with a chaotic component, i.e. without 1:1 spatiotemporal correspondences, can be compared by looking at statistics of feature properties. Feature extraction, however, requires a clear mathematical definition of the features - which many meteorological objects still lack. Traditionally, object extractions are often heuristic, defined only by implemented algorithms, and thus are not comparable. This work surveys our framework designed for efficient development of feature tracking methods and for testing new feature definitions. The framework supports well-established visualization practices and is being used by atmospheric researchers to diagnose and compare data.}, language = {en} } @misc{LindowBaumHege, author = {Lindow, Norbert and Baum, Daniel and Hege, Hans-Christian}, title = {Atomic Accessibility Radii for Molecular Dynamics Analysis}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-68468}, abstract = {In molecular structure analysis and visualization, the molecule's atoms are often modeled as hard spheres parametrized by their positions and radii. While the atom positions result from experiments or molecular simulations, values for the radii are typically taken from the literature. Most often, van der Waals (vdW) radii are used, for which diverse values exist. As a consequence, different visualization and analysis tools use different atomic radii, and the analyses are less objective than often believed. Furthermore, for the geometric accessibility analysis of molecular structures, vdW radii are not well suited. The reason is that during a molecular dynamics simulation, depending on the force field and the kinetic energy in the system, non-bonded atoms can come so close to each other that their vdW spheres intersect.
In this paper, we introduce a new kind of atomic radius, called ``atomic accessibility radius'', that better characterizes the accessibility of an atom in a given molecular trajectory. The new radii reflect the movement possibilities of atoms in the simulated physical system. They are computed by solving a linear program that maximizes the radii of the atoms under the constraint that non-bonded spheres do not intersect in the considered molecular trajectory. Using this data-driven approach, the actual accessibility of atoms can be visualized more precisely.}, language = {en} } @misc{LindowBaumLeborgneetal., author = {Lindow, Norbert and Baum, Daniel and Leborgne, Morgan and Hege, Hans-Christian}, title = {Interactive Visualization of RNA and DNA Structures}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-69704}, abstract = {The analysis and visualization of nucleic acids (RNA and DNA) play an increasingly important role due to the growing number of known 3-dimensional structures of such molecules. The great complexity of these structures, in particular, those of RNA, demands interactive visualization to get deeper insights into the relationship between the 2D secondary structure motifs and their 3D tertiary structures. Over the last decades, a lot of research in molecular visualization has focused on the visual exploration of protein structures, while nucleic acids have only been marginally addressed. In contrast to proteins, which are composed of amino acids, the building blocks of nucleic acids are nucleotides. They form structuring patterns that differ from those of proteins and, hence, also require different visualization and exploration techniques. In order to support interactive exploration of nucleic acids, the computation of secondary structure motifs as well as their visualization in 2D and 3D must be fast. Therefore, in this paper, we focus on the performance of both the computation and visualization of nucleic acid structures. For the first time, we present a ray casting-based visualization of RNA and DNA secondary and tertiary structures, which enables real-time visualization of even large molecular dynamics trajectories. Furthermore, we provide a detailed description of all important aspects to visualize nucleic acid secondary and tertiary structures. With this, we close an important gap in molecular visualization.}, language = {en} } @misc{AgudoJacomeHegePaetschetal., author = {Agudo J{\'a}come, Leonardo and Hege, Hans-Christian and Paetsch, Olaf and P{\"o}thkow, Kai}, title = {Three-Dimensional Reconstruction and Quantification of Dislocation Substructures from Transmission Electron Microscopy Stereo-Pairs}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-70339}, abstract = {Many material properties are strongly influenced by dislocations, the carriers of plastic deformation. It is therefore paramount to have appropriate tools to quantify dislocation substructures with regard to their features, e.g., dislocation density, Burgers vectors or line direction. While the transmission electron microscope (TEM) has been the most widely used instrument for investigating dislocations, it is usually limited to the two-dimensional (2D) observation of three-dimensional (3D) structures. We reconstruct, visualize and quantify 3D dislocation substructure models from only two TEM images (stereo-pairs) and assess the results.
The reconstruction is based on the manual interactive tracing of filiform objects on both images of the stereo-pair. The reconstruction and quantification methods are demonstrated on dark field (DF) scanning (S)TEM micrographs of dislocation substructures imaged under diffraction contrast conditions. For this purpose, thick regions (> 300 nm) of TEM foils are analyzed, which are extracted from a Ni-base superalloy single crystal after high-temperature creep deformation. It is shown how the method allows 3D quantification from stereo-pairs in a wide range of tilt conditions, achieving line length and orientation uncertainties of 3 \% and 7°, respectively. Parameters that affect the quality of such reconstructions are discussed.}, language = {en} }