@article{GreweLiuHildebrandtetal., author = {Grewe, Carl Martin and Liu, Tuo and Hildebrandt, Andrea and Zachow, Stefan}, title = {The Open Virtual Mirror Framework for Enfacement Illusions - Enhancing the Sense of Agency With Avatars That Imitate Facial Expressions}, series = {Behavior Research Methods}, journal = {Behavior Research Methods}, publisher = {Springer}, doi = {10.3758/s13428-021-01761-9}, language = {en} }

@article{GreweLiuKahletal., author = {Grewe, Carl Martin and Liu, Tuo and Kahl, Christoph and Hildebrandt, Andrea and Zachow, Stefan}, title = {Statistical Learning of Facial Expressions Improves Realism of Animated Avatar Faces}, series = {Frontiers in Virtual Reality}, volume = {2}, journal = {Frontiers in Virtual Reality}, publisher = {Frontiers}, doi = {10.3389/frvir.2021.619811}, pages = {1 -- 13}, language = {en} }

@misc{GreweLeRouxPilzetal., author = {Grewe, Carl Martin and Le Roux, Gabriel and Pilz, Sven-Kristofer and Zachow, Stefan}, title = {Spotting the Details: The Various Facets of Facial Expressions}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-67696}, abstract = {3D Morphable Models (MM) are a popular tool for the analysis and synthesis of facial expressions. They represent plausible variations in facial shape and appearance within a low-dimensional parameter space. Fitted to a face scan, the model's parameters compactly encode its expression patterns. This expression code can be used, for instance, as a feature in automatic facial expression recognition. For accurate classification, an MM that can adequately represent the various characteristic facets and variants of each expression is necessary. Currently available MMs are limited in the diversity of expression patterns. We present a novel high-quality Facial Expression Morphable Model built from a large-scale face database as a tool for expression analysis and synthesis. Establishment of accurate dense correspondence, up to the finest skin features, enables a detailed statistical analysis of facial expressions. Various characteristic shape patterns are identified for each expression. The results of our analysis give rise to a new facial expression code. We demonstrate the advantages of such a code for the automatic recognition of expressions, and compare the accuracy of our classifier to the state of the art.}, language = {en} }

@inproceedings{GreweleRouxPilzetal., author = {Grewe, Carl Martin and Le Roux, Gabriel and Pilz, Sven-Kristofer and Zachow, Stefan}, title = {Spotting the Details: The Various Facets of Facial Expressions}, series = {IEEE International Conference on Automatic Face and Gesture Recognition}, booktitle = {IEEE International Conference on Automatic Face and Gesture Recognition}, doi = {10.1109/FG.2018.00049}, pages = {286 -- 293}, language = {en} }

@inproceedings{SiqueiraRodriguesRiehmZachowetal., author = {Siqueira Rodrigues, Lucas and Riehm, Felix and Zachow, Stefan and Israel, Johann Habakuk}, title = {VoxSculpt: An Open-Source Voxel Library for Tomographic Volume Sculpting in Virtual Reality}, series = {2023 9th International Conference on Virtual Reality (ICVR), Xianyang, China, 2023}, booktitle = {2023 9th International Conference on Virtual Reality (ICVR), Xianyang, China, 2023}, doi = {10.1109/ICVR57957.2023.10169420}, pages = {515 -- 523}, abstract = {Manual processing of tomographic data volumes, such as interactive image segmentation in medicine or paleontology, is considered a time-consuming and cumbersome endeavor.
Immersive volume sculpting stands as a potential solution to improve its efficiency and intuitiveness. However, current open-source software solutions do not provide the required performance and functionality. We address this issue by contributing a novel open-source game-engine voxel library that supports real-time immersive volume sculpting. Our design leverages GPU instancing, parallel computing, and a chunk-based data structure to optimize collision detection and rendering. We have implemented features that enable fast voxel interaction and improve precision. Our benchmark evaluation indicates that our implementation offers a significant improvement over the state of the art and can render and modify millions of visible voxels while maintaining stable performance for real-time interaction in virtual reality.}, language = {en} }

@article{WagendorfNahlesVachetal., author = {Wagendorf, Oliver and Nahles, Susanne and Vach, Kirstin and Kernen, Florian and Zachow, Stefan and Heiland, Max and Fl{\"u}gge, Tabea}, title = {The impact of teeth and dental restorations on gray value distribution in cone-beam computer tomography - a pilot study}, series = {International Journal of Implant Dentistry}, volume = {9}, journal = {International Journal of Implant Dentistry}, number = {27}, doi = {10.1186/s40729-023-00493-z}, abstract = {Purpose: To investigate the influence of teeth and dental restorations on the facial skeleton's gray value distributions in cone-beam computed tomography (CBCT). Methods: Gray value selection for upper and lower jaw segmentation was performed in 40 patients. In total, CBCT data of 20 maxillae and 20 mandibles, ten partially edentulous and ten fully edentulous in each jaw, respectively, were evaluated using two different gray value selection procedures: manual lower threshold selection and automated lower threshold selection. Two-sample t tests, linear regression models, linear mixed models, and Pearson's correlation coefficients were computed to evaluate the influence of teeth, dental restorations, and threshold selection procedures on gray value distributions. Results: Manual threshold selection resulted in significantly different gray values in the fully and partially edentulous mandible (p = 0.015, difference: 123). With automated threshold selection, only tendencies toward different gray values in fully edentulous compared to partially edentulous jaws were observed (difference: 58-75). Significantly different gray values were found between the two threshold selection approaches, independent of the dental situation of the analyzed jaw. No significant correlation between the number of teeth and gray values was found, but a trend towards higher gray values in patients with more teeth was noted. Conclusions: Standard gray values derived from CT imaging do not apply to threshold-based bone segmentation in CBCT. Teeth influence gray values and segmentation results. Inaccurate bone segmentation may result in ill-fitting surgical guides produced from CBCT data and in misinterpretation of bone density, which is crucial for selecting surgical protocols.}, language = {en} }

@misc{EhlkeHeylandMaerdianetal., author = {Ehlke, Moritz and Heyland, Mark and M{\"a}rdian, Sven and Duda, Georg and Zachow, Stefan}, title = {3D Assessment of Osteosynthesis based on 2D Radiographs}, issn = {1438-0064}, url = {http://nbn-resolving.de/urn:nbn:de:0297-zib-56217}, abstract = {We present a novel method to derive the surface distance of an osteosynthesis plate w.r.t.
the patient-specific surface of the distal femur based on postoperative 2D radiographs. In a first step, the implant geometry is used as a calibration object to relate the implant and the individual X-ray images spatially in a virtual X-ray setup. Second, the patient-specific femoral shape and pose are reconstructed by fitting a deformable statistical shape and intensity model (SSIM) to the X-rays. The relative positioning between femur and implant is then assessed in terms of the displacement between the reconstructed 3D shape of the femur and the plate. We believe that the approach presented in this paper constitutes a meaningful tool to elucidate the effect of implant positioning on fracture healing and, ultimately, to derive load recommendations after surgery.}, language = {en} }

@inproceedings{KraemerHerrmannBoethetal., author = {Kr{\"a}mer, Martin and Herrmann, Karl-Heinz and Boeth, Heide and Tycowicz, Christoph von and K{\"o}nig, Christian and Zachow, Stefan and Ehrig, Rainald and Hege, Hans-Christian and Duda, Georg and Reichenbach, J{\"u}rgen}, title = {Measuring 3D knee dynamics using center out radial ultra-short echo time trajectories with a low cost experimental setup}, series = {ISMRM (International Society for Magnetic Resonance in Medicine), 23rd Annual Meeting 2015, Toronto, Canada}, booktitle = {ISMRM (International Society for Magnetic Resonance in Medicine), 23rd Annual Meeting 2015, Toronto, Canada}, language = {en} }

@inproceedings{EhlkeHeylandMaerdianetal.CAOS, author = {Ehlke, Moritz and Heyland, Mark and M{\"a}rdian, Sven and Duda, Georg and Zachow, Stefan}, title = {Assessing the relative positioning of an osteosynthesis plate to the patient-specific femoral shape from plain 2D radiographs}, series = {Proceedings of the 15th Annual Meeting of CAOS-International (CAOS)}, booktitle = {Proceedings of the 15th Annual Meeting of CAOS-International (CAOS)}, abstract = {We present a novel method to derive the surface distance of an osteosynthesis plate w.r.t. the patient-specific surface of the distal femur based on 2D X-ray images. Our goal is to study, from clinical data, how the plate-to-bone distance affects bone healing. The patient-specific 3D shape of the femur is, however, seldom recorded for cases of femoral osteosynthesis since this typically requires Computed Tomography (CT), which comes at high cost and radiation dose. Our method instead utilizes two postoperative X-ray images to derive the femoral shape and thus can be applied to radiographs that are taken in clinical routine for follow-up. First, the implant geometry is used as a calibration object to relate the implant and the individual X-ray images spatially in a virtual X-ray setup. In a second step, the patient-specific femoral shape and pose are reconstructed in the virtual setup by fitting a deformable statistical shape and intensity model (SSIM) to the images. The relative positioning between femur and implant is then assessed in terms of the displacement between the reconstructed 3D shape of the femur and the plate. A preliminary evaluation based on 4 cadaver datasets shows that the method derives the plate-to-bone distance with a mean absolute error of less than 1 mm and a maximum error of 4.7 mm compared to ground truth from CT.
We believe that the approach presented in this paper constitutes a meaningful tool to elucidate the effect of implant positioning on fracture healing.}, language = {en} }

@inproceedings{EhlkeHeylandMaerdianetal.CURAC, author = {Ehlke, Moritz and Heyland, Mark and M{\"a}rdian, Sven and Duda, Georg and Zachow, Stefan}, title = {3D Assessment of Osteosynthesis based on 2D Radiographs}, series = {Proceedings of the Jahrestagung der Deutschen Gesellschaft f{\"u}r Computer- und Roboterassistierte Chirurgie (CURAC)}, booktitle = {Proceedings of the Jahrestagung der Deutschen Gesellschaft f{\"u}r Computer- und Roboterassistierte Chirurgie (CURAC)}, pages = {317 -- 321}, abstract = {We present a novel method to derive the surface distance of an osteosynthesis plate w.r.t. the patient-specific surface of the distal femur based on postoperative 2D radiographs. In a first step, the implant geometry is used as a calibration object to relate the implant and the individual X-ray images spatially in a virtual X-ray setup. Second, the patient-specific femoral shape and pose are reconstructed by fitting a deformable statistical shape and intensity model (SSIM) to the X-rays. The relative positioning between femur and implant is then assessed in terms of the displacement between the reconstructed 3D shape of the femur and the plate. We believe that the approach presented in this paper constitutes a meaningful tool to elucidate the effect of implant positioning on fracture healing and, ultimately, to derive load recommendations after surgery.}, language = {en} }
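% Editorial note: The Wagendorf et al. entry above contrasts manual and
% automated lower-threshold selection for threshold-based bone segmentation
% of CBCT gray values. The following is a minimal Python sketch of that
% idea, assuming NumPy and scikit-image; Otsu's method is used as a
% stand-in for the automated procedure (the entry does not specify which
% estimator was used), and the data are synthetic, not a real CBCT volume.
%
%   import numpy as np
%   from skimage.filters import threshold_otsu
%
%   def segment_bone(volume, lower_threshold=None):
%       # Binary bone mask from a gray-value volume. If no manual lower
%       # threshold is given, Otsu's method serves as the automated
%       # selection (an assumption, not the cited paper's procedure).
%       if lower_threshold is None:
%           lower_threshold = threshold_otsu(volume)
%       return volume >= lower_threshold
%
%   # Synthetic stand-in for a CBCT volume (hypothetical gray values).
%   rng = np.random.default_rng(0)
%   volume = rng.normal(400.0, 120.0, size=(64, 64, 64))
%
%   mask_auto = segment_bone(volume)                          # automated threshold
%   mask_manual = segment_bone(volume, lower_threshold=500.0) # manual threshold
%   print(mask_auto.mean(), mask_manual.mean())               # segmented voxel fractions
%
% The entry's conclusion that CT-derived standard gray values do not
% transfer to CBCT corresponds here to a fixed manual threshold and a
% per-volume automated threshold generally selecting different voxel
% fractions.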
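% Editorial note: The Ehlke et al. entries describe reconstructing the
% patient-specific femoral shape by fitting a deformable statistical shape
% and intensity model (SSIM) to calibrated postoperative radiographs, then
% measuring the plate-to-bone displacement. The toy Python sketch below
% (NumPy/SciPy) mimics only the geometric skeleton of such a pipeline:
% a linear shape model, an orthographic projection standing in for the
% calibrated virtual X-ray setup, least-squares fitting of the mode
% coefficients, and a nearest-neighbor plate-to-bone distance. All shapes,
% modes, and names are hypothetical placeholders; the published method
% additionally matches X-ray intensities, which is not reproduced here.
%
%   import numpy as np
%   from scipy.optimize import minimize
%   from scipy.spatial import cKDTree
%
%   rng = np.random.default_rng(1)
%   mean_shape = rng.normal(size=(200, 3))       # placeholder mean femur surface
%   modes = 0.05 * rng.normal(size=(5, 200, 3))  # placeholder deformation modes
%
%   def instance(coeffs):
%       # Model instance: mean surface plus linear combination of modes.
%       return mean_shape + np.tensordot(coeffs, modes, axes=1)
%
%   def project(points):
%       # Stand-in for the calibrated virtual X-ray: orthographic drop of z.
%       return points[:, :2]
%
%   # Synthetic "radiograph" features generated from known coefficients.
%   target_2d = project(instance(np.array([0.8, -0.3, 0.1, 0.0, 0.2])))
%
%   def objective(coeffs):
%       # Least-squares distance between projected model and 2D features.
%       return np.sum((project(instance(coeffs)) - target_2d) ** 2)
%
%   fit = minimize(objective, x0=np.zeros(5), method="Powell")
%   femur = instance(fit.x)  # reconstructed 3D surface points
%
%   # Placeholder implant surface offset from the bone.
%   plate = rng.normal(size=(50, 3)) + np.array([0.0, 0.0, 2.0])
%   # Plate-to-bone distance: nearest femur point for each plate point.
%   distances = cKDTree(femur).query(plate)[0]
%   print("mean plate-to-bone distance:", distances.mean())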