@article{EhlkeRammLameckeretal.2013,
  author = {Ehlke, Moritz and Ramm, Heiko and Lamecker, Hans and Hege, Hans-Christian and Zachow, Stefan},
  title = {Fast Generation of Virtual X-ray Images for Reconstruction of 3D Anatomy},
  series = {IEEE Transactions on Visualization and Computer Graphics},
  volume = {19},
  journal = {IEEE Transactions on Visualization and Computer Graphics},
  number = {12},
  doi = {10.1109/TVCG.2013.159},
  url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-35928},
  pages = {2673 -- 2682},
  year = {2013},
  language = {en}
}

@article{DunlopApanaskevichLehmannetal.2016,
  author = {Dunlop, Jason and Apanaskevich, Dmitry and Lehmann, Jens and Hoffmann, Rene and Fusseis, Florian and Ehlke, Moritz and Zachow, Stefan and Xiao, Xianghui},
  title = {Microtomography of the Baltic amber tick Ixodes succineus reveals affinities with the modern Asian disease vector Ixodes ovatus},
  series = {BMC Evolutionary Biology},
  volume = {16},
  journal = {BMC Evolutionary Biology},
  number = {1},
  doi = {10.1186/s12862-016-0777-y},
  year = {2016},
  abstract = {Background: Fossil ticks are extremely rare, whereby Ixodes succineus Weidner, 1964 from Eocene (ca. 44-49 Ma) Baltic amber is one of the oldest examples of a living hard tick genus (Ixodida: Ixodidae). Previous work suggested it was most closely related to the modern and widespread European sheep tick Ixodes ricinus (Linnaeus, 1758). Results: Restudy using phase contrast synchrotron x-ray tomography yielded images of exceptional quality. These confirm the fossil's referral to Ixodes Latreille, 1795, but the characters resolved here suggest instead affinities with the Asian subgenus Partipalpiger Hoogstraal et al., 1973 and its single living (and medically significant) species Ixodes ovatus Neumann, 1899. We redescribe the amber fossil here as Ixodes (Partipalpiger) succineus. Conclusions: Our data suggest that Ixodes ricinus is unlikely to be directly derived from Weidner's amber species, but instead reveal that the Partipalpiger lineage was originally more widely distributed across the northern hemisphere. The closeness of Ixodes (P.) succineus to a living vector of a wide range of pathogens offers the potential to correlate its spatial and temporal position (northern Europe, nearly 50 million years ago) with the estimated origination dates of various tick-borne diseases.},
  language = {en}
}

@article{PimentelSzengelEhlkeetal.,
  author = {Pimentel, Pedro and Szengel, Angelika and Ehlke, Moritz and Lamecker, Hans and Zachow, Stefan and Estacio, Laura and Doenitz, Christian and Ramm, Heiko},
  title = {Automated Virtual Reconstruction of Large Skull Defects using Statistical Shape Models and Generative Adversarial Networks},
  series = {Towards the Automatization of Cranial Implant Design in Cranioplasty},
  volume = {12439},
  journal = {Towards the Automatization of Cranial Implant Design in Cranioplasty},
  editor = {Li, Jianning and Egger, Jan},
  edition = {1},
  publisher = {Springer International Publishing},
  doi = {10.1007/978-3-030-64327-0_3},
  pages = {16 -- 27},
  abstract = {We present an automated method for extrapolating missing regions in label data of the skull in an anatomically plausible manner. The ultimate goal is to design patient-specific cranial implants for correcting large, arbitrarily shaped defects of the skull that can, for example, result from trauma of the head.
  Our approach utilizes a 3D statistical shape model (SSM) of the skull and a 2D generative adversarial network (GAN) that is trained in an unsupervised fashion from samples of healthy patients alone. By fitting the SSM to given input labels containing the skull defect, a first approximation of the healthy state of the patient is obtained. The GAN is then applied to further correct and smooth the output of the SSM in an anatomically plausible manner. Finally, the defect region is extracted using morphological operations and subtraction between the extrapolated healthy state of the patient and the defective input labels. The method is trained and evaluated based on data from the MICCAI 2020 AutoImplant challenge. It produces state-of-the-art results on regularly shaped cut-outs that were present in the training and testing data of the challenge. Furthermore, due to the unsupervised nature of the approach, the method generalizes well to previously unseen defects of varying shapes that were only present in the hidden test dataset.},
  language = {en}
}

@article{AmbellanTackEhlkeetal.,
  author = {Ambellan, Felix and Tack, Alexander and Ehlke, Moritz and Zachow, Stefan},
  title = {Automated Segmentation of Knee Bone and Cartilage combining Statistical Shape Knowledge and Convolutional Neural Networks: Data from the Osteoarthritis Initiative},
  series = {Medical Image Analysis},
  volume = {52},
  journal = {Medical Image Analysis},
  number = {2},
  doi = {10.1016/j.media.2018.11.009},
  pages = {109 -- 118},
  abstract = {We present a method for the automated segmentation of knee bones and cartilage from magnetic resonance imaging that combines a priori knowledge of anatomical shape with Convolutional Neural Networks (CNNs). The proposed approach incorporates 3D Statistical Shape Models (SSMs) as well as 2D and 3D CNNs to achieve a robust and accurate segmentation of even highly pathological knee structures. The shape models and neural networks employed are trained using data of the Osteoarthritis Initiative (OAI) and the MICCAI grand challenge "Segmentation of Knee Images 2010" (SKI10), respectively. We evaluate our method on 40 validation and 50 submission datasets of the SKI10 challenge. For the first time, an accuracy equivalent to the inter-observer variability of human readers has been achieved in this challenge. Moreover, the quality of the proposed method is thoroughly assessed using various measures for data from the OAI, i.e. 507 manual segmentations of bone and cartilage, and 88 additional manual segmentations of cartilage. Our method yields sub-voxel accuracy for both OAI datasets. We made the 507 manual segmentations as well as our experimental setup publicly available to further aid research in the field of medical image segmentation. In conclusion, combining statistical anatomical knowledge via SSMs with the localized classification via CNNs results in a state-of-the-art segmentation method for knee bones and cartilage from MRI data.},
  language = {en}
}

@article{LiPimentelSzengeletal.,
  author = {Li, Jianning and Pimentel, Pedro and Szengel, Angelika and Ehlke, Moritz and Lamecker, Hans and Zachow, Stefan and Estacio, Laura and Doenitz, Christian and Ramm, Heiko and Shi, Haochen and Chen, Xiaojun and Matzkin, Franco and Newcombe, Virginia and Ferrante, Enzo and Jin, Yuan and Ellis, David G. and Aizenberg, Michele R. and Kodym, Oldrich and Spanel, Michal and Herout, Adam and Mainprize, James G. and Fishman, Zachary and Hardisty, Michael R.
  and Bayat, Amirhossein and Shit, Suprosanna and Wang, Bomin and Liu, Zhi and Eder, Matthias and Pepe, Antonio and Gsaxner, Christina and Alves, Victor and Zefferer, Ulrike and von Campe, Cord and Pistracher, Karin and Sch{\"a}fer, Ute and Schmalstieg, Dieter and Menze, Bjoern H. and Glocker, Ben and Egger, Jan},
  title = {AutoImplant 2020 - First MICCAI Challenge on Automatic Cranial Implant Design},
  series = {IEEE Transactions on Medical Imaging},
  volume = {40},
  journal = {IEEE Transactions on Medical Imaging},
  number = {9},
  issn = {0278-0062},
  doi = {10.1109/TMI.2021.3077047},
  pages = {2329 -- 2342},
  abstract = {The aim of this paper is to provide a comprehensive overview of the MICCAI 2020 AutoImplant Challenge. The approaches and publications submitted and accepted within the challenge will be summarized and reported, highlighting common algorithmic trends and algorithmic diversity. Furthermore, the evaluation results will be presented, compared, and discussed with regard to the challenge aim: seeking low-cost, fast, and fully automated solutions for cranial implant design. Based on feedback from collaborating neurosurgeons, this paper concludes by stating open issues and post-challenge requirements for intra-operative use.},
  language = {en}
}

@article{SekuboyinaHusseiniBayatetal.,
  author = {Sekuboyina, Anjany and Husseini, Malek E. and Bayat, Amirhossein and L{\"o}ffler, Maximilian and Liebl, Hans and Li, Hongwei and Tetteh, Giles and Kukačka, Jan and Payer, Christian and Štern, Darko and Urschler, Martin and Chen, Maodong and Cheng, Dalong and Lessmann, Nikolas and Hu, Yujin and Wang, Tianfu and Yang, Dong and Xu, Daguang and Ambellan, Felix and Amiranashvili, Tamaz and Ehlke, Moritz and Lamecker, Hans and Lehnert, Sebastian and Lirio, Marilia and de Olaguer, Nicol{\'a}s P{\'e}rez and Ramm, Heiko and Sahu, Manish and Tack, Alexander and Zachow, Stefan and Jiang, Tao and Ma, Xinjun and Angerman, Christoph and Wang, Xin and Brown, Kevin and Kirszenberg, Alexandre and Puybareau, {\'E}lodie and Chen, Di and Bai, Yiwei and Rapazzo, Brandon H. and Yeah, Timyoas and Zhang, Amber and Xu, Shangliang and Hou, Feng and He, Zhiqiang and Zeng, Chan and Xiangshang, Zheng and Liming, Xu and Netherton, Tucker J. and Mumme, Raymond P. and Court, Laurence E. and Huang, Zixun and He, Chenhang and Wang, Li-Wen and Ling, Sai Ho and Huynh, L{\^e} Duy and Boutry, Nicolas and Jakubicek, Roman and Chmelik, Jiri and Mulay, Supriti and Sivaprakasam, Mohanasankar and Paetzold, Johannes C. and Shit, Suprosanna and Ezhov, Ivan and Wiestler, Benedikt and Glocker, Ben and Valentinitsch, Alexander and Rempfler, Markus and Menze, Bj{\"o}rn H. and Kirschke, Jan S.},
  title = {VerSe: A Vertebrae labelling and segmentation benchmark for multi-detector CT images},
  series = {Medical Image Analysis},
  volume = {73},
  journal = {Medical Image Analysis},
  doi = {10.1016/j.media.2021.102166},
  abstract = {Vertebral labelling and segmentation are two fundamental tasks in an automated spine processing pipeline. Reliable and accurate processing of spine images is expected to benefit clinical decision support systems for diagnosis, surgery planning, and population-based analysis of spine and bone health. However, designing automated algorithms for spine processing is challenging predominantly due to considerable variations in anatomy and acquisition protocols and due to a severe shortage of publicly available data.
  Addressing these limitations, the Large Scale Vertebrae Segmentation Challenge (VerSe) was organised in conjunction with the International Conference on Medical Image Computing and Computer Assisted Intervention (MICCAI) in 2019 and 2020, with a call for algorithms tackling the labelling and segmentation of vertebrae. Two datasets containing a total of 374 multi-detector CT scans from 355 patients were prepared and 4505 vertebrae have individually been annotated at voxel level by a human-machine hybrid algorithm (https://osf.io/nqjyw/, https://osf.io/t98fz/). A total of 25 algorithms were benchmarked on these datasets. In this work, we present the results of this evaluation and further investigate the performance variation at the vertebra level, scan level, and different fields of view. We also evaluate the generalisability of the approaches to an implicit domain shift in data by evaluating the top-performing algorithms of one challenge iteration on data from the other iteration. The principal takeaway from VerSe: the performance of an algorithm in labelling and segmenting a spine scan hinges on its ability to correctly identify vertebrae in cases of rare anatomical variations. The VerSe content and code can be accessed at: https://github.com/anjany/verse.},
  language = {en}
}

@article{SekuboyinaBayatHusseinietal.,
  author = {Sekuboyina, Anjany and Bayat, Amirhossein and Husseini, Malek E. and L{\"o}ffler, Maximilian and Li, Hongwei and Tetteh, Giles and Kukačka, Jan and Payer, Christian and Štern, Darko and Urschler, Martin and Chen, Maodong and Cheng, Dalong and Lessmann, Nikolas and Hu, Yujin and Wang, Tianfu and Yang, Dong and Xu, Daguang and Ambellan, Felix and Amiranashvili, Tamaz and Ehlke, Moritz and Lamecker, Hans and Lehnert, Sebastian and Lirio, Marilia and de Olaguer, Nicol{\'a}s P{\'e}rez and Ramm, Heiko and Sahu, Manish and Tack, Alexander and Zachow, Stefan and Jiang, Tao and Ma, Xinjun and Angerman, Christoph and Wang, Xin and Wei, Qingyue and Brown, Kevin and Wolf, Matthias and Kirszenberg, Alexandre and Puybareau, {\'E}lodie and Valentinitsch, Alexander and Rempfler, Markus and Menze, Bj{\"o}rn H. and Kirschke, Jan S.},
  title = {VerSe: A Vertebrae Labelling and Segmentation Benchmark for Multi-detector CT Images},
  series = {arXiv},
  journal = {arXiv},
  language = {en}
}