@inproceedings{BurgaDaemenDjuderijaetal.2013, author = {Burga, Jose and Daemen, Jeff and Djuderija, Sascha and Gnehr, Maren and Goossens, Lars and Hartz, Sven and Haufs-Brusberg, Peter and Herder, Jens and Ibrahim, Mohammed and Koop, Nikolas and Leske, Christophe and Meyer, Laurid and M{\"u}ller, Antje and Salgert, Bj{\"o}rn and Schroeder, Richard and Thiele, Simon}, title = {Four Metamorphosis States in a Distributed Virtual (TV) Studio: Human, Cyborg, Avatar, and Bot}, series = {10th International Conference on Visual Media Production (CVMP 2013), London}, booktitle = {10th International Conference on Visual Media Production (CVMP 2013), London}, address = {London}, year = {2013}, abstract = {The major challenge in virtual studio technology is the interaction between the actor and virtual objects. Within a distributed live production, two locally separated markerless tracking systems were used simultaneously alongside a virtual studio. The production was based on a fully tracked actor, a cyborg (half actor, half graphics), an avatar, and a bot. All participants could interact and throw a virtual disc. This setup is compared and mapped to Milgram's continuum, and technical challenges are described.}, language = {en} } @inproceedings{BrosdaDaemenDjuderijaetal.2012, author = {Brosda, Constantin and Daemen, Jeff and Djuderija, Sascha and Joeres, Stephan and Langer, Oleg and Schweitzer, Andre and Wilhelm, Andreas and Herder, Jens}, title = {TouchPlanVS Lite - A Tablet-based Tangible Multitouch Planning System for Virtual TV Studio Productions}, series = {Proceedings of the 2012 Joint International Conference on Human-Centered Computer Environments}, booktitle = {Proceedings of the 2012 Joint International Conference on Human-Centered Computer Environments}, publisher = {ACM}, address = {New York}, isbn = {978-1-4503-1191-5}, pages = {64 -- 67}, year = {2012}, abstract = {This paper presents a mobile approach to integrating tangible user feedback in today's virtual TV studio productions. We describe a tangible multitouch planning system, enabling a single user to prepare and customize scene flow and settings. Users can view and interact with virtual objects by using a tangible user interface on a capacitive multitouch surface. TV scenes created in the 2D setting are simultaneously rendered as a separate 3D view using a production/target renderer. The user thereby experiences a closer reproduction of the final production, and set assets can be reused. Subsequently, a user can arrange scenes on a timeline while maintaining different versions/sequences. The system consists of a tablet and a workstation, which does all application processing and rendering. The tablet is just an interface connected via wireless LAN.}, language = {en} } @inproceedings{BaranowskiUtzigFischeretal.2018, author = {Baranowski, Artur and Utzig, Sebastian and Fischer, Philipp and Gerndt, Andreas and Herder, Jens}, title = {3D spacecraft configuration using immersive AR technology}, series = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, booktitle = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15.
Workshop der GI-Fachgruppe VR/AR}, editor = {Herder, Jens and Geiger, Christian and D{\"o}rner, Ralf and Grimm, Paul}, publisher = {Shaker Verlag}, address = {Herzogenrath}, isbn = {978-3-8440-6215-1}, doi = {10.2370/9783844062151}, pages = {71 -- 82}, year = {2018}, abstract = {In this paper, we propose an integrated immersive augmented reality solution for a software tool supporting spacecraft design and verification. The spacecraft design process relies on expertise in many domains, such as thermal and structural engineering. The various subsystems of a spacecraft are highly interdependent and have differing requirements and constraints. In this context, interactive visualizations play an important role in making expert knowledge accessible. Recent immersive display technologies offer new ways of presenting and interacting with computer-generated content. Possibilities and challenges for spacecraft configuration employing these technologies are explored and discussed. A user interface design for an application using the Microsoft HoloLens is proposed. To this end, techniques for selecting a spacecraft component and manipulating its position and orientation in 3D space are developed and evaluated. Thus, advantages and limitations of this approach to spacecraft configuration are revealed and discussed.}, language = {en} } @inproceedings{BallesterRipollHerderLadwigetal.2016, author = {Ballester Ripoll, Marina and Herder, Jens and Ladwig, Philipp and Vermeegen, Kai}, title = {Comparison of two Gesture Recognition Sensors for Virtual TV Studios}, series = {GI-VRAR, Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 13. Workshop der GI-Fachgruppe VR/AR}, booktitle = {GI-VRAR, Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 13. Workshop der GI-Fachgruppe VR/AR}, editor = {Pfeiffer, Thies and Fr{\"o}hlich, Julia and Kruse, Rolf}, publisher = {Shaker Verlag}, address = {Herzogenrath}, isbn = {978-3-8440-4718-9}, year = {2016}, abstract = {In order to improve the interactivity between users and computers, recent technologies focus on incorporating gesture recognition into interactive systems. The aim of this article is to evaluate the effectiveness of using a Myo control armband and the Kinect 2 for recognition of gestures in order to interact with virtual objects in a weather report scenario. The Myo armband has an inertial measurement unit and is able to read electrical activity produced by skeletal muscles, which can be recognized as gestures trained by machine learning. A Kinect sensor was used to build a dataset containing motion recordings of 8 different gestures, which were also trained by a machine learning algorithm.
Both input methods, the Kinect 2 and the Myo armband, were evaluated with the same interaction patterns in a user study, which allows a direct comparison and reveals the benefits and limits of each technique.}, language = {en} } @inproceedings{AytenHerderVonolfen2010, author = {Ayten, H{\"u}seyin and Herder, Jens and Vonolfen, Wolfgang}, title = {Visual Acceptance Evaluation of Soft Shadow Algorithms for Virtual TV Studios}, series = {HC '10 Proceedings of the 13th International Conference on Humans and Computers}, booktitle = {HC '10 Proceedings of the 13th International Conference on Humans and Computers}, publisher = {University of Aizu Press}, address = {Aizu-Wakamatsu}, pages = {66 -- 71}, year = {2010}, abstract = {Shadows in computer graphics are an important rendering aspect for spatial objects. For realtime computer applications such as games, it is essential to represent shadows as accurately as possible. Also, various TV stations work with virtual studio systems instead of real studio sets. Especially for those systems, a realistic impression of the rendered and mixed scene is important. Hence, one challenge is the creation of a natural shadow impression. This paper presents the results of an empirical study to compare the performance and quality of different shadow mapping methods. For this test, a prototype studio renderer was developed. A percentage closer filter (PCF) with a number of specific resolutions is used to minimize the aliasing issue. More advanced algorithms which generate smooth shadows, such as the percentage closer soft shadow (PCSS) method and the variance shadow maps (VSM) method, are analysed. Different open source APIs are used to develop the virtual studio renderer, giving the benefit of permanent enhancement. The Ogre 3D graphics engine is used to implement the rendering system, benefiting from various functions and plugins. The transmission of the tracking data is accomplished with the VRPN server/client and the Intersense API. The different shadow algorithms are compared in a virtual studio environment which also casts real shadows and thus gives a chance for a direct comparison throughout the empirical user study. The performance is measured in frames per second.}, language = {en} } @inproceedings{AndrukaniecFrankenKirchhofetal.2013, author = {Andrukaniec, Edward and Franken, Carmen and Kirchhof, Daniel and Kraus, Tobias and Sch{\"o}ndorff, Fabian and Geiger, Christian}, title = {OUTLIVE - An Augmented Reality Multi-user Board Game Played with a Mobile Device}, series = {Advances in Computer Entertainment. 10th International Conference, ACE 2013, Boekelo, The Netherlands, November 12-15, 2013. Proceedings}, volume = {8253}, booktitle = {Advances in Computer Entertainment. 10th International Conference, ACE 2013, Boekelo, The Netherlands, November 12-15, 2013.
Proceedings}, editor = {Reidsma, Dennis and Katayose, Haruhiro and Nijholt, Anton}, publisher = {Springer International Publishing}, address = {Cham}, isbn = {978-3-319-03160-6}, doi = {10.1007/978-3-319-03161-3_38}, pages = {501 -- 504}, year = {2013}, language = {en} } @article{AmanoMatsushitaYanagawaetal.1998, author = {Amano, Katsumi and Matsushita, Fumio and Yanagawa, Hirofumi and Cohen, Michael and Herder, Jens and Martens, William and Koba, Yoshiharu and Tohyama, Mikio}, title = {A Virtual Reality Sound System Using Room-Related Transfer Functions Delivered Through a Multispeaker Array: the PSFC at the University of Aizu Multimedia Center}, series = {TVRSJ}, volume = {3}, journal = {TVRSJ}, number = {1}, publisher = {J-STAGE}, doi = {10.18974/tvrsj.3.1_1}, pages = {1 -- 12}, year = {1998}, abstract = {The PSFC, or Pioneer Sound Field Controller, is a DSP-driven hemispherical loudspeaker array, installed at the University of Aizu Multimedia Center. The PSFC features realtime manipulation of the primary components of sound spatialization for each of two audio sources located in a virtual environment, including the content (apparent direction and distance) and context (room characteristics: reverberation level, room size and liveness). In an alternate mode, it can also direct the destination of the two separate input signals across 14 loudspeakers, manipulating the direction of the virtual sound sources with no control over apparent distance other than that afforded by source loudness (including no simulated environmental reflections or reverberation). The PSFC speaker dome is about 10 m in diameter, accommodating about fifty simultaneous users, including about twenty users comfortably standing or sitting near its ``sweet spot,'' the area in which the illusions of sound spatialization are most vivid. Collocated with a large screen rear-projection stereographic display, the PSFC is intended for advanced multimedia and virtual reality applications.}, language = {en} } @inproceedings{AmanoMatsushitaYanagawaetal.1996, author = {Amano, Katsumi and Matsushita, Fumio and Yanagawa, Hirofumi and Cohen, Michael and Herder, Jens and Koba, Yoshiharu and Tohyama, Mikio}, title = {The Pioneer sound field control system at the University of Aizu Multimedia Center}, series = {RO-MAN '96 Tsukuba}, booktitle = {RO-MAN '96 Tsukuba}, publisher = {IEEE}, address = {Piscataway}, isbn = {0-7803-3253-9}, doi = {10.1109/ROMAN.1996.568887}, pages = {495 -- 499}, year = {1996}, abstract = {The PSFC, or Pioneer sound field control system, is a DSP-driven hemispherical 14-loudspeaker array, installed at the University of Aizu Multimedia Center. Collocated with a large screen rear-projection stereographic display, the PSFC features realtime control of virtual room characteristics and direction of two separate sound channels, smoothly steering them around a configurable soundscape. The PSFC controls an entire sound field, including sound direction, virtual distance, and simulated environment (reverb level, room size and liveness) for each source. It can also configure a dry (DSP-less) switching matrix for direct directionalization. The PSFC speaker dome is about 14 m in diameter, allowing about twenty users at once to comfortably stand or sit near its sweet spot.}, language = {en} } @inproceedings{OPUS4-1625, title = {Virtuelle und Erweiterte Realit{\"a}t - 9.
Workshop der GI-Fachgruppe VR/AR}, editor = {Geiger, Christian and Herder, Jens and Vierjahn, Tom}, publisher = {Shaker Verlag}, address = {Aachen}, isbn = {978-3-8440-1309-2}, year = {2012}, abstract = {The ninth workshop "Virtuelle und Erweiterte Realit{\"a}t" of the Fachgruppe VR/AR of the Gesellschaft f{\"u}r Informatik e.V. was held at the FH D{\"u}sseldorf from 19.09. to 20.09.2012. This is the proceedings volume of the ninth workshop on "Virtuelle und Erweiterte Realit{\"a}t", a series initiated by the Fachgruppe VR/AR of the Gesellschaft f{\"u}r Informatik e.V. As an established platform for the exchange of information and ideas within the German-speaking VR/AR community, the workshop offered the ideal setting for presenting current results and ongoing projects from research and development - including provocative ideas - for discussion before an expert audience. In particular, we also wanted to give young researchers the opportunity to present their work, including ongoing doctoral projects as well as outstanding student projects from universities and research institutions. A selection of the best articles from the workshop will be published in a special issue of the Journal of Virtual Reality and Broadcasting (JVRB).}, language = {mul} } @inproceedings{OPUS4-1587, title = {Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, editor = {Herder, Jens and Geiger, Christian and D{\"o}rner, Ralf and Grimm, Paul}, publisher = {Shaker Verlag}, address = {Herzogenrath}, isbn = {978-3-8440-6215-1}, doi = {10.2370/9783844062151}, year = {2018}, abstract = {The fifteenth workshop "Virtuelle und Erweiterte Realit{\"a}t" of the Fachgruppe VR/AR of the Gesellschaft f{\"u}r Informatik e.V. was held at the Hochschule D{\"u}sseldorf from 10.-11.10.2018. As an established platform for the exchange of information and ideas within the German-speaking VR/AR community, the workshop offered the ideal setting for presenting current results and ongoing projects from research and development - including provocative ideas - for discussion before an expert audience. In particular, young researchers were also given the opportunity to present their work, including ongoing doctoral projects as well as outstanding student projects from universities and research institutions. The programme committee selected 12 full and 6 short papers from a total of over 29 submissions. The contributions cover the spectrum of virtual and augmented reality. On 10 October, the workshop took place at the same time as the innovation day of the Innovationshub in D{\"u}sseldorf.
F{\"u}r einen Beitrag eigneten sich alle Themenfelder der "Virtuellen und Erweiterten Realit{\"a}t", insbesondere: 3D Eingabeger{\"a}te und Interaktionstechniken Avatare und Agenten Displaytechnologien und Tracking (Echtzeit-)Rendering Education und Edutainment Entertainment und Experiences Gesellschaft und soziotechnische Aspekte Human Factors Industrielle Einsatzszenarien Innovative Anwendungen K{\"u}nstlerische Anwendungen Modellierung und Simulation Multimodale Interaktion Systemarchitekturen und Intelligente Umgebungen Verteilte und kooperative VR/AR-Umgebungen}, language = {mul} } @periodical{OPUS4-1502, title = {Journal of Virtual Reality and Broadcasting}, editor = {Herder, Jens}, publisher = {Hochschule D{\"u}sseldorf}, address = {D{\"u}sseldorf}, issn = {1860-2037}, year = {2019}, abstract = {The "Journal of Virtual Reality and Broadcasting" is an open access E-journal covering advanced media technology for the integration of human computer interaction and modern information systems. The main focus is on the creation of synergies between such basic technologies as computer graphics and state-of-the-art broadcasting techniques. The main goals are to publish research results in the field of Virtual Reality and Broadcasting, to provoke discussions, and to promote the exchange of ideas and information. Developments in the area have a direct effect on society, therefore social aspects will also be considered. As an interdisciplinary field Virtual Reality requires multilateral collaboration in order to enable new applications. RB publishes articles consecutively and in electronic form only. All articles are peer-reviewed in a strict review process by at least three independent experts from the appropriate field of research and appear in the English language. The articles are organized in one volume per year with ten to twenty articles. Material that has been previously presented at conferences undergo a major revision and are extended and modified by the authors with at least 20\% new material according to the Journal's policy for previously published articles. The Journal has been established in 2004. Currently, the submission and publication of articles is free of charge. No author fees are applied.}, language = {en} }