@misc{AmbellanHanikvonTycowicz, author = {Ambellan, Felix and Hanik, Martin and von Tycowicz, Christoph}, title = {Morphomatics: Geometric morphometrics in non-Euclidean shape spaces}, doi = {10.12752/8544}, abstract = {Morphomatics is an open-source Python library for (statistical) shape analysis developed within the geometric data analysis and processing research group at Zuse Institute Berlin. It contains prototype implementations of intrinsic manifold-based methods that are highly consistent and avoid the influence of unwanted effects such as bias due to arbitrary choices of coordinates.}, language = {en} } @misc{BaumLindowHegeetal., author = {Baum, Daniel and Lindow, Norbert and Hege, Hans-Christian and Lepper, Verena and Siopi, Tzulia and Kutz, Frank and Mahlow, Kristin and Mahnke, Heinz-Eberhard}, title = {Revealing hidden text in rolled and folded papyri}, issn = {1438-0064}, doi = {10.1007/s00339-017-0808-6}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-61826}, abstract = {Ancient Egyptian papyri are often folded, rolled up or kept as small packages, sometimes even sealed. Physically unrolling or unfolding these packages might severely damage them. We demonstrate a way to gain access to the hidden script without physical unfolding by employing computed tomography and mathematical algorithms for virtual unrolling and unfolding. Our algorithmic approaches are combined with manual interaction. This provides the necessary flexibility to enable the unfolding of even complicated and partly damaged papyrus packages. In addition, it allows us to cope with challenges posed by the structure of ancient papyrus, which is rather irregular compared to other writing substrates such as metallic foils or parchment. Unfolding of packages is done in two stages. In the first stage, we virtually invert the physical folding process step by step until the partially unfolded package is topologically equivalent to a scroll or a papyrus sheet folded only along one fold line. To minimize distortions at this stage, we apply the method of moving least squares. In the second stage, the papyrus is simply flattened, which requires the definition of a medial surface. We have applied our software framework to several papyri. In this work, we present the results of applying our approaches to mockup papyri that were either rolled or folded along perpendicular fold lines. In the case of the folded papyrus, our approach represents the first attempt to address the unfolding of such complicated folds.}, language = {en} } @misc{BauschertBuesingD'Andreagiovannietal., author = {Bauschert, Thomas and B{\"u}sing, Christina and D'Andreagiovanni, Fabio and Koster, Arie M.C.A. and Kutschka, Manuel and Steglich, Uwe}, title = {Network Planning under Demand Uncertainty with Robust Optimization}, issn = {1438-0064}, doi = {10.1109/MCOM.2014.6736760}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-42557}, abstract = {The planning of a communication network inevitably depends on the quality of both the planning tool and the demand forecast used. In this article, we show exemplarily how the emerging area of Robust Optimization can advance network planning through a more accurate mathematical description of the demand uncertainty. After a general introduction of the concept and its application to a basic network design problem, we present two applications: multi-layer and mixed-line-rate network design. 
We conclude with a discussion of extensions of the robustness concept to increase the accuracy of handling uncertainties.}, language = {en} } @misc{BeckerHiller, author = {Becker, Kai-Helge and Hiller, Benjamin}, title = {Efficient Enumeration of Acyclic Graph Orientations with Sources or Sinks Revisited}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-77684}, abstract = {In a recent paper, Conte et al. [CGMR2017] presented an algorithm for enumerating all acyclic orientations of a graph G=(V,E) with a single source (and related orientations) with delay O(|V||E|). In this paper we revisit the problem by going back to an early paper by de Fraysseix et al. [FMR1995], who proposed an algorithm for enumerating all bipolar orientations of a graph based on a recursion formula. We first formalize de Fraysseix et al.'s algorithm for bipolar orientations and determine that its delay is also O(|V||E|). We then apply their recursion formula to the case of Conte et al.'s enumeration problem and show that this yields a more efficient enumeration algorithm with delay O(\sqrt{|V|}|E|). Finally, we suggest a way to further streamline the algorithm that leads to a particularly simple implementation.}, language = {en} } @misc{BleyD'AndreagiovanniKarch, author = {Bley, Andreas and D'Andreagiovanni, Fabio and Karch, Daniel}, title = {Scheduling technology migration in WDM Networks}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-42654}, abstract = {The rapid technological evolution of telecommunication networks requires service providers to regularly update their technology in order to remain competitive in the marketplace. However, upgrading the technology in a network is not a trivial task. New hardware components need to be installed in the network, and during the installation network connectivity may be temporarily compromised. The Wavelength Division Multiplexing (WDM) technology, whose upgrade is considered here, shares fiber links among several optical connections, and tearing down a single link may disrupt several optical connections at once. When the upgrades involve large parts of a network, typically not all links can be upgraded in parallel, which may lead to an unavoidably longer disruption of some connections. Poor scheduling of the overall endeavor, however, can dramatically increase the disconnection time of parts of the network, causing extended service disruption. In this contribution, we study the problem of finding a schedule of the fiber link upgrades that minimizes the total service disruption time. To the best of our knowledge, this problem has not yet been formalized and investigated. The aim of our work is to close this gap by presenting a mathematical optimization model for the problem and an innovative solution algorithm that tackles its intrinsic difficulties. Computational experience on realistic instances completes our study. 
Our original investigations have been driven by the real needs of DFN, the operator of the German National Research and Education Network and our partner in the BMBF research project ROBUKOM (http://www.robukom.de/).}, language = {en} } @misc{BuesingD'Andreagiovanni, author = {B{\"u}sing, Christina and D'Andreagiovanni, Fabio}, title = {A new theoretical framework for Robust Optimization under multi-band uncertainty}, issn = {1438-0064}, doi = {10.1007/978-3-319-00795-3_17}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-42644}, abstract = {We provide an overview of our main results on Linear Programming problems whose coefficient matrix is subject to uncertainty modeled through a multi-band set. Such an uncertainty set generalizes the classical one proposed by Bertsimas and Sim and is particularly suitable in the common case of arbitrary non-symmetric distributions of the parameters. Our investigations were inspired by the practical needs of our industrial partner in ongoing projects focused on the design of robust telecommunications networks.}, language = {en} } @misc{BuesingD'AndreagiovanniRaymond, author = {B{\"u}sing, Christina and D'Andreagiovanni, Fabio and Raymond, Annie}, title = {0-1 Multiband Robust Optimization}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-44093}, abstract = {We provide an overview of new theoretical results that we obtained while further investigating multiband robust optimization, a new model for robust optimization that we recently proposed to tackle uncertainty in mixed-integer linear programming. This new model extends and refines the classical Gamma-robustness model of Bertsimas and Sim and is particularly useful in the common case of arbitrary asymmetric distributions of the uncertainty. Here, we focus on uncertain 0-1 programs and analyze their robust counterparts when the uncertainty is represented through a multiband set. Our investigations were inspired by the needs of our industrial partners in the research project ROBUKOM.}, language = {en} } @misc{ChraparyDalitzNeunetal., author = {Chrapary, Hagen and Dalitz, Wolfgang and Neun, Winfried and Sperber, Wolfram}, title = {Design, concepts, and state of the art of the swMATH service}, issn = {1438-0064}, doi = {10.1007/s11786-017-0305-5}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-62263}, abstract = {In this paper, the concepts and design of an efficient information service for mathematical software and other mathematical research data are presented. The publication-based approach and the Web-based approach are the main building blocks of the service and are discussed. Heuristic methods are used for the identification, extraction, and ranking of information about software and other mathematical research data. The methods provide not only information about the research data but also link software and mathematical research data to the scientific context.}, language = {en} } @misc{ClasenPaarProhaska, author = {Clasen, Malte and Paar, Philip and Prohaska, Steffen}, title = {Level of Detail for Trees Using Clustered Ellipsoids}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-14251}, number = {11-41}, abstract = {We present a level-of-detail method for trees based on ellipsoids and lines. 
We leverage the Expectation Maximization algorithm with a Gaussian Mixture Model to create a hierarchy of high-quality leaf clusterings, while the branches are simplified using agglomerative bottom-up clustering to preserve connectivity. The simplification runs in a preprocessing step and requires no human interaction. For a fly-by over and through a scene of 10,000 trees, our method renders at an average of 40 ms/frame, up to 6 times faster than billboard clouds with comparable artifacts.}, language = {en} } @misc{CostaMantonOstrovskyetal., author = {Costa, Marta and Manton, James D. and Ostrovsky, Aaron D. and Prohaska, Steffen and Jefferis, Gregory S.X.E.}, title = {NBLAST: Rapid, sensitive comparison of neuronal structure and construction of neuron family databases}, issn = {1438-0064}, doi = {10.1016/j.neuron.2016.06.012}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-59672}, abstract = {Neural circuit mapping is generating datasets of tens of thousands of labeled neurons. New computational tools are needed to search and organize these data. We present NBLAST, a sensitive and rapid algorithm for measuring pairwise neuronal similarity. NBLAST considers both position and local geometry, decomposing neurons into short segments; matched segments are scored using a probabilistic scoring matrix defined by statistics of matches and non-matches. We validated NBLAST on a published dataset of 16,129 single Drosophila neurons. NBLAST can distinguish neuronal types down to the finest level (single identified neurons) without a priori information. Cluster analysis of extensively studied neuronal classes identified new types and unreported topographical features. Fully automated clustering organized the validation dataset into 1052 clusters, many of which map onto previously described neuronal types. NBLAST supports additional query types, including searching neurons against transgene expression patterns. Finally, we show that NBLAST is effective with data from other invertebrates and zebrafish.}, language = {en} } @misc{D'AndreagiovanniKrolikowskiPulaj, author = {D'Andreagiovanni, Fabio and Krolikowski, Jonatan and Pulaj, Jonad}, title = {A hybrid primal heuristic for Robust Multiperiod Network Design}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-44081}, abstract = {We investigate the Robust Multiperiod Network Design Problem, a generalization of the classical Capacitated Network Design Problem that additionally considers multiple design periods and provides solutions protected against traffic uncertainty. Given the intrinsic difficulty of the problem, which proves challenging even for state-of-the-art commercial solvers, we propose a hybrid primal heuristic based on the combination of ant colony optimization and an exact large neighborhood search. Computational experiments on a set of realistic instances from the SNDlib show that our heuristic can find solutions of extremely good quality with a low optimality gap.}, language = {en} } @misc{D'AndreagiovanniRaymond, author = {D'Andreagiovanni, Fabio and Raymond, Annie}, title = {Multiband Robust Optimization and its Adoption in Harvest Scheduling}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-43380}, abstract = {A central assumption in classical optimization is that all the input data of a problem are exact. However, in many real-world problems, the input data are subject to uncertainty. 
In such situations, neglecting uncertainty may lead to nominally optimal solutions that are actually suboptimal or even infeasible. Robust optimization offers a remedy for optimization under uncertainty by considering only the subset of solutions protected against the data deviations. In this paper, we provide an overview of the main theoretical results of multiband robustness, a new robust optimization model that extends and refines the classical theory introduced by Bertsimas and Sim. After introducing some new results for the special case of pure binary programs, we focus on the harvest scheduling problem and show how multiband robustness can be adopted to tackle the uncertainty affecting the volume of produced timber and to yield a reduction in the price of robustness.}, language = {en} } @misc{DercksenHegeOberlaender2013, author = {Dercksen, Vincent J. and Hege, Hans-Christian and Oberlaender, Marcel}, title = {The Filament Editor: An Interactive Software Environment for Visualization, Proof-Editing and Analysis of 3D Neuron Morphology}, issn = {1438-0064}, doi = {10.1007/s12021-013-9213-2}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-43157}, year = {2013}, abstract = {Neuroanatomical analysis, such as classification of cell types, depends on reliable reconstruction of large numbers of complete 3D dendrite and axon morphologies. At present, the majority of neuron reconstructions are obtained from preparations in a single tissue slice in vitro, thus suffering from cut-off dendrites and, more dramatically, cut-off axons. In general, axons can innervate volumes of several cubic millimeters and may reach path lengths of tens of centimeters. Thus, their complete reconstruction requires in vivo labeling, histological sectioning and imaging of large fields of view. Unfortunately, anisotropic background conditions across such large tissue volumes, as well as faintly labeled thin neurites, result in incomplete or erroneous automated tracings and even lead experts to make annotation errors during manual reconstructions. Consequently, tracing reliability constitutes the major bottleneck for reconstructing complete 3D neuron morphologies. Here, we present a novel set of tools, integrated into a software environment named 'Filament Editor', for creating reliable neuron tracings from sparsely labeled in vivo datasets. The Filament Editor allows for simultaneous visualization of complex neuronal tracings and image data in a 3D viewer, proof-editing of neuronal tracings, alignment and interconnection across sections, and morphometric analysis in relation to 3D anatomical reference structures. We illustrate the functionality of the Filament Editor on the example of in vivo labeled axons and demonstrate that, for the exemplary dataset, the final tracing results after proof-editing are independent of the expertise of the human operator.}, language = {en} } @misc{EhlkeRammLameckeretal., author = {Ehlke, Moritz and Ramm, Heiko and Lamecker, Hans and Hege, Hans-Christian and Zachow, Stefan}, title = {Fast Generation of Virtual X-ray Images from Deformable Tetrahedral Meshes}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-41896}, abstract = {We propose a novel GPU-based approach to render virtual X-ray projections of deformable tetrahedral meshes. These meshes represent the shape and the internal density distribution of a particular anatomical structure and are derived from statistical shape and intensity models (SSIMs). 
We apply our method to improve the geometric reconstruction of 3D anatomy (e.g.\ pelvic bone) from 2D X-ray images. For that purpose, shape and density of a tetrahedral mesh are varied and virtual X-ray projections are generated within an optimization process until the similarity between the computed virtual X-ray and the respective anatomy depicted in a given clinical X-ray is maximized. The OpenGL implementation presented in this work deforms and projects tetrahedral meshes of high resolution (200,000+ tetrahedra) at interactive rates. It generates virtual X-rays that accurately depict the density distribution of an anatomy of interest. Compared to existing methods that accumulate X-ray attenuation in deformable meshes, our novel approach significantly boosts the deformation/projection performance. The proposed projection algorithm scales better with respect to mesh resolution and complexity of the density distribution, and the combined deformation and projection on the GPU scales better with respect to the number of deformation parameters. The gain in performance allows for a larger number of cycles in the optimization process. Consequently, it reduces the risk of getting stuck in a local optimum. We believe that our approach can contribute to orthopedic surgery, where 3D anatomy information needs to be extracted from 2D X-rays to support surgeons in better planning joint replacements.}, language = {en} } @misc{GriewankStreubelLehmannetal., author = {Griewank, Andreas and Streubel, Tom and Lehmann, Lutz and Hasenfelder, Richard and Radons, Manuel}, title = {Piecewise linear secant approximation via Algorithmic Piecewise Differentiation}, issn = {1438-0064}, doi = {10.1080/10556788.2017.1387256}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-61642}, abstract = {It is shown how piecewise differentiable functions \(F: \mathbb{R}^n \to \mathbb{R}^m\) that are defined by evaluation programs can be approximated locally by a piecewise linear model based on a pair of sample points \(\check{x}\) and \(\hat{x}\). We show that the discrepancy between function and model at any point \(x\) is of the bilinear order \(O(\|x - \check{x}\| \, \|x - \hat{x}\|)\). This is a little surprising since \(x \in \mathbb{R}^n\) may vary over the whole Euclidean space, and we utilize only two function samples \(\check{F} = F(\check{x})\) and \(\hat{F} = F(\hat{x})\), as well as the intermediates computed during their evaluation. As an application of the piecewise linearization procedure we devise a generalized Newton's method based on successive piecewise linearization and prove for it sufficient conditions for convergence and convergence rates equaling those of semismooth Newton. We conclude with the derivation of formulas for the numerically stable implementation of the piecewise linearization methods developed here.}, language = {en} } @misc{HaslerPetersKottig, author = {Hasler, Tim and Peters-Kottig, Wolfgang}, title = {Vorschrift oder Thunfisch? - Zur Langzeitverf{\"u}gbarkeit von Forschungsdaten}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-43010}, abstract = {``I'm going to make him an offer he can't refuse.'' This statement from an entirely different context quite aptly captures the wish of service providers, and the purpose of services, for data producers in research data management. While pressure to hand over data is not conducive, opening up an option for doing so very much is. 
The present article discusses the sustainability of research and its data on the basis of the insights and experiences gained in the first phase of the DFG project EWIG. [Fn 01] A selection of pitfalls in research data management is presented, drawing on findings from expert interviews and on our own experience in building long-term archiving workflows. Initial concepts developed in EWIG for transferring data from differently structured data sources into the ``Long-Term Domain'' are described.}, language = {de} } @misc{HillerVredeveld, author = {Hiller, Benjamin and Vredeveld, Tjark}, title = {Stochastic dominance analysis of Online Bin Coloring algorithms}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-16502}, abstract = {This paper proposes a new method for the probabilistic analysis of online algorithms. It is based on the notion of stochastic dominance. We develop the method for the online bin coloring problem introduced by Krumke et al. (2008). Using methods for the stochastic comparison of Markov chains, we establish the result that the performance of the online algorithm GreedyFit is stochastically better than the performance of the algorithm OneBin for any number of items processed. This result gives a more realistic picture than competitive analysis and explains the behavior observed in simulations.}, language = {en} } @misc{HillerVredeveld, author = {Hiller, Benjamin and Vredeveld, Tjark}, title = {Probabilistic alternatives for competitive analysis}, issn = {1438-0064}, doi = {10.1007/s00450-011-0149-1}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-15131}, abstract = {In the last 20 years, competitive analysis has become the main tool for analyzing the quality of online algorithms. Despite this, competitive analysis has also been criticized: it sometimes cannot discriminate between algorithms that exhibit significantly different empirical behavior, or it even favors an algorithm that is worse from an empirical point of view. Therefore, there have been several approaches to circumvent these drawbacks. In this survey, we discuss probabilistic alternatives for competitive analysis.}, language = {en} } @misc{Hoffmann, type = {Master Thesis}, author = {Hoffmann, Marie}, title = {Approximate Algorithms for Distributed Systems}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-42370}, school = {Zuse Institute Berlin (ZIB)}, pages = {75}, abstract = {Peer-to-peer (P2P) systems form a special class of distributed systems. Typically, nodes in a P2P system are flat and share the same responsibilities. In this thesis, we focus on three problems that occur in P2P systems: the storage of data replicates, quantile computation on distributed data streams, and churn rate estimation. Data replication is one of the oldest techniques to maintain stored data in a P2P system and to reply to read requests. Applications that use data replication include distributed databases. They are part of an abstract overlay network and do not see the underlying network topology. The question is how to place a set of data replicates in a distributed system such that response times and failure probabilities become minimal, without a priori knowledge of the topology of the underlying hardware nodes. We show how to utilize an agglomerative clustering procedure to reach this goal. 
State-of-the-art algorithms for the aggregation of distributed data or data streams require synchronization at some point, or merge data aggregates hierarchically, which does not fit the basic principle of P2P systems. We test whether randomized communication and merging of data aggregates are able to produce the same results. These data aggregates serve to answer quantile queries. Constituting and maintaining a P2P overlay network requires frequent message passing. One goal is to minimize the number of maintenance messages, since they consume bandwidth that might be missing for other applications. The lower bound on the frequency of maintenance messages depends highly on the churn rate of peers. We show how to estimate the mean lifetime of peers and how to reduce the frequency of maintenance messages without destabilizing the infrastructure of the constituting overlay.}, language = {en} } @misc{KaplanLauferProhaskaetal., author = {Kaplan, Bernhard and Laufer, Jan and Prohaska, Steffen and Buchmann, Jens}, title = {Monte-Carlo-based inversion scheme for 3D quantitative photoacoustic tomography}, issn = {1438-0064}, doi = {10.1117/12.2251945}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-62318}, abstract = {The goal of quantitative photoacoustic tomography (qPAT) is to recover maps of the chromophore distributions from multiwavelength images of the initial pressure. Model-based inversions that incorporate the physical processes underlying the photoacoustic (PA) signal generation represent a promising approach. Monte-Carlo models of the light transport are computationally expensive, but provide accurate predictions of the fluence distributions, especially in the ballistic and quasi-ballistic regimes. Here, we focus on the inverse problem of 3D qPAT of blood oxygenation and investigate the application of the Monte-Carlo method in a model-based inversion scheme. A forward model combining light transport based on the MCX simulator with acoustic propagation modeled by the k-Wave toolbox was used to generate a PA image data set acquired in a tissue phantom over a planar detection geometry. The combination of the optical and acoustic models is shown to account for limited-view artifacts. In addition, the errors in the fluence due to, for example, partial volume artifacts and absorbers immediately adjacent to the region of interest are investigated. To accomplish large-scale inversions in 3D, the number of degrees of freedom is reduced by applying image segmentation to the initial pressure distribution to extract a limited number of regions with homogeneous optical parameters. The absorber concentration in the tissue phantom was estimated using a coordinate descent parameter search based on the comparison between measured and modeled PA spectra. The relative concentrations estimated using this approach lie within 5 \% of the known concentrations. Finally, we discuss the feasibility of this approach to recover the blood oxygenation from experimental data.}, language = {en} }