@inproceedings{VermeegenHerder2018,
  author    = {Vermeegen, Kai and Herder, Jens},
  title     = {A Lighthouse-based Camera Tracking System for Professional Virtual Studios},
  series    = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR},
  booktitle = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR},
  editor    = {Herder, Jens and Geiger, Christian and D{\"o}rner, Ralf and Grimm, Paul},
  publisher = {Shaker Verlag},
  address   = {Herzogenrath},
  isbn      = {978-3-8440-6215-1},
  doi       = {10.2370/9783844062151},
  pages     = {19 -- 26},
  year      = {2018},
  abstract  = {This article describes the possibilities and problems that occur when using the SteamVR tracking 2.0 system as a camera tracking system in a virtual studio, and explains an approach to implementation and calibration within a professional studio environment. The tracking system allows for cost-effective deployment. Relevant application fields also include mixed reality recording and the streaming of AR and VR experiences.},
  language  = {en}
}

@inproceedings{HerderTakedaVermeegenetal.2019,
  author    = {Herder, Jens and Takeda, Shinpei and Vermeegen, Kai and Davin, Till and Berners, Dominique and Ryskeldiev, Bektur and Zimmer, Christian and Druzetic, Ivana and Geiger, Christian},
  title     = {Mixed Reality Art Experiments - Immersive Access to Collective Memories},
  series    = {ISEA2019, Proceedings, 25th International Symposium on Electronic Art, Gwangju, South Korea, June 22-28, 2019},
  booktitle = {ISEA2019, Proceedings, 25th International Symposium on Electronic Art, Gwangju, South Korea, June 22-28, 2019},
  publisher = {IESA},
  address   = {Gwangju},
  pages     = {334 -- 341},
  year      = {2019},
  abstract  = {We report on several experiments applying mixed reality technology in the context of accessing collective memories of the atomic bombs, the Holocaust, and the Second World War. We discuss the impact of Virtual Reality, Augmented Virtuality, and Augmented Reality on specific memorial locations. We show how a virtual studio can be used to demonstrate an augmented reality application for a specific location in a remote session within a video conference. Augmented Virtuality is used to recreate the local environment, thus providing context and helping the participants recollect emotions related to a certain place. This technique demonstrates the advantages of using virtual reality (VR) and augmented reality (AR) environments for rapid prototyping and for pitching project ideas in a live remote setting.},
  language  = {en}
}

@inproceedings{BallesterRipollHerderLadwigetal.2016,
  author    = {Ballester Ripoll, Marina and Herder, Jens and Ladwig, Philipp and Vermeegen, Kai},
  title     = {Comparison of two Gesture Recognition Sensors for Virtual TV Studios},
  series    = {GI-VRAR, Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 13. Workshop der GI-Fachgruppe VR/AR},
  booktitle = {GI-VRAR, Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 13. Workshop der GI-Fachgruppe VR/AR},
  editor    = {Pfeiffer, Thies and Fr{\"o}hlich, Julia and Kruse, Rolf},
  publisher = {Shaker Verlag},
  address   = {Herzogenrath},
  isbn      = {978-3-8440-4718-9},
  year      = {2016},
  abstract  = {In order to improve the interactivity between users and computers, recent technologies focus on incorporating gesture recognition into interactive systems.
The aim of this article is to evaluate the effectiveness of using a Myo control armband and the Kinect 2 for gesture recognition in order to interact with virtual objects in a weather report scenario. The Myo armband has an inertial measurement unit and is able to read the electrical activity produced by skeletal muscles, which can be recognized as gestures trained through machine learning. A Kinect sensor was used to build a dataset containing motion recordings of 8 different gestures, which was also used to train a machine-learning-based gesture recognition algorithm. Both input methods, the Kinect 2 and the Myo armband, were evaluated with the same interaction patterns in a user study, which allows a direct comparison and reveals the benefits and limits of each technique.},
  language  = {en}
}

@inproceedings{HerderLadwigVermeegenetal.2018,
  author    = {Herder, Jens and Ladwig, Philipp and Vermeegen, Kai and Hergert, Dennis and Busch, Florian and Klever, Kevin and Holthausen, Sebastian and Ryskeldiev, Bektur},
  title     = {Mixed Reality Experience - How to Use a Virtual (TV) Studio for Demonstration of Virtual Reality Applications},
  series    = {GRAPP 2018 - 13th International Conference on Computer Graphics Theory and Applications},
  booktitle = {GRAPP 2018 - 13th International Conference on Computer Graphics Theory and Applications},
  publisher = {INSTICC},
  address   = {Setubal - Portugal},
  isbn      = {978-989-758-287-5},
  doi       = {10.5220/0006637502810287},
  url       = {http://nbn-resolving.de/urn:nbn:de:hbz:due62-opus-15823},
  pages     = {281 -- 287},
  year      = {2018},
  abstract  = {The article discusses the question of ``How to convey the experience in a virtual environment to third parties?'' and explains the different technical implementations that can be used for live streaming and recording of a mixed reality experience. The real-world applications of our approach include education, entertainment, e-sports, tutorials, and cinematic trailers, which can benefit from our research by finding a suitable solution for their needs. We explain and outline our Mixed Reality systems and discuss the experience of recorded demonstrations of different VR applications, including the need for calibrated camera lens parameters based on real-time encoder values.},
  language  = {en}
}