@inproceedings{Herder1997, author = {Herder, Jens}, title = {Cooperative Tools for Teaching : an Impact of a Network Environment}, series = {Annual Report of the Information Systems and Technology Center, University of Aizu, October 1997}, booktitle = {Annual Report of the Information Systems and Technology Center, University of Aizu, October 1997}, address = {Aizu}, url = {http://nbn-resolving.de/urn:nbn:de:hbz:due62-opus-827}, pages = {3 -- 8}, year = {1997}, abstract = {Education at the University of Aizu is focused on computer science. Besides being the subject matter of many courses, however, the computer also plays a vital role in the educational process itself, both in the distribution of instructional media and in providing students with valuable practical experience. All students have unlimited access (24 hours a day) to individual networked workstations, most of which are multimedia-capable (even video capture is possible in two exercise rooms). Without software and content tailored for computer-aided instruction, the hardware becomes an expensive decoration. In any case, there is a need to better educate instructors and students in the use of the equipment. In the interest of facilitating effective, collaborative use of network-based computers in teaching, this article explores the impact that a network environment can have on such activities. First, as a general overview, and to examine the motivation for the use of a network environment in teaching, this article reviews a range of different styles of collaboration. Then the article shows what kinds of tools are available for use, within the context of what has come to be called Computer-Supported Cooperative Work (CSCW).}, language = {en} } @inproceedings{BallesterRipollHerderLadwigetal.2016, author = {Ballester Ripoll, Marina and Herder, Jens and Ladwig, Philipp and Vermeegen, Kai}, title = {Comparison of two Gesture Recognition Sensors for Virtual TV Studios}, series = {GI-VRAR, Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 13. Workshop der GI-Fachgruppe VR/AR,}, booktitle = {GI-VRAR, Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 13. Workshop der GI-Fachgruppe VR/AR,}, editor = {Pfeiffer, Thies and Fr{\"o}hlich, Julia and Kruse, Rolf}, publisher = {Shaker Verlag}, address = {Herzogenrath}, isbn = {978-3-8440-4718-9}, year = {2016}, abstract = {In order to improve the interactivity between users and computers, recent technologies focus on incorporating gesture recognition into interactive systems. The aim of this article is to evaluate the effectiveness of using a Myo control armband and the Kinect 2 for recognition of gestures in order to interact with virtual objects in a weather report scenario. The Myo armband has an inertial measurement unit and is able to read electrical activity produced by skeletal muscles, which can be recognized as gestures trained by machine learning. A Kinect sensor was used to build up a dataset containing motion recordings of 8 different gestures, which were likewise trained with a machine learning algorithm.
Both input methods, the Kinect 2 and the Myo armband, were evaluated with the same interaction patterns in a user study, which allows a direct comparison and reveals the benefits and limits of each technique.}, language = {en} } @inproceedings{Herder2000, author = {Herder, Jens}, title = {Challenges of Virtual Sets: From Broadcasting to Interactive Media}, series = {Seventh International Workshop on Human}, booktitle = {Seventh International Workshop on Human}, publisher = {University of Aizu}, address = {Aizu-Wakamatsu}, pages = {13 -- 17}, year = {2000}, abstract = {Virtual sets have evolved from computer-generated, prerendered 2D backgrounds to realtime, responsive 3D computer graphics and are nowadays part of the standard repertoire of broadcasting divisions. The graphics, which are combined with a real video feed, are becoming more sophisticated, more realistic-looking, and more responsive. We look at recent developments and suggest further ones, such as the integration of spatial audio into studio production and the generation of interactive media streams. Educational institutions such as the Duesseldorf University of Applied Sciences have recognized the demands of the growing media industry and established new courses on media technology.}, language = {en} } @inproceedings{DeppeNemitzHerder2018, author = {Deppe, Robert and Nemitz, Oliver and Herder, Jens}, title = {Augmented reality for supporting manual non-destructive ultrasonic testing of metal pipes and plates}, series = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, booktitle = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, editor = {Herder, Jens and Geiger, Christian and D{\"o}rner, Ralf and Grimm, Paul}, publisher = {Shaker Verlag}, address = {Herzogenrath}, isbn = {978-3-8440-6215-1}, doi = {10.2370/9783844062151}, pages = {45 -- 52}, year = {2018}, abstract = {We describe an application of augmented reality technology for non-destructive testing of products in the metal industry. The prototype is created with hardware and software that are usually employed in the gaming industry, and delivers positions for creating ultrasonic material scans (C-scans). Using a stereo camera in combination with an HMD enables realtime visualisation of the probe's path, as well as the setting of virtual markers on the specimen. As part of the implementation, the downhill simplex optimization algorithm is used to fit the specimen to a cloud of recorded surface points. The accuracy is statistically tested and evaluated, with the result that the tracking system is accurate to ca. 1-2 millimeters under well set-up conditions. This paper is of interest not only for research institutes of the metal industry, but also for any area of work in which enhancement with augmented reality is possible and precise tracking is necessary.}, language = {en} } @article{KuniiHerderMyszkowskietal.1994, author = {Kunii, Tosiyasu L. and Herder, Jens and Myszkowski, Karol and Okunev, Oleg and Okuneva, Galina and Ibusuki, Masumi}, title = {Articulation Simulation for an Intelligent Dental Care System}, series = {Displays}, volume = {15}, journal = {Displays}, number = {3}, pages = {181 -- 188}, year = {1994}, abstract = {CAD/CAM techniques are used increasingly in dentistry for the design and fabrication of tooth restorations. An important issue is preserving occlusal contacts of teeth after restoration.
Traditional techniques based on the use of casts with mechanical articulators require manual adjustment of the occlusal surface, which becomes impractical when hard restoration materials like porcelain are used; they are also time- and labor-consuming. Most existing computer systems completely ignore such an articulation check, or perform the check only at the level of a tooth and its immediate neighbors. We present a new mathematical model and a related user interface for global articulation simulation, developed for the Intelligent Dental Care System project. The aim of the simulation is to eliminate the use of mechanical articulators and manual adjustment in the process of designing dental restorations and in articulation diagnostics. The mathematical model is based upon differential topological modeling of the jaws considered as a mechanical system. The user interface exploits metaphors that are familiar to dentists from everyday practice. A new input device designed specifically for use with articulation simulation is proposed.}, language = {en} } @inproceedings{RyskeldievCohenHerder2017, author = {Ryskeldiev, Bektur and Cohen, Michael and Herder, Jens}, title = {Applying rotational tracking and photospherical imagery to immersive mobile telepresence and live video streaming groupware}, series = {Proceeding SA '17 SIGGRAPH Asia 2017 Mobile Graphics \& Interactive Applications, Article No. 5}, booktitle = {Proceeding SA '17 SIGGRAPH Asia 2017 Mobile Graphics \& Interactive Applications, Article No. 5}, publisher = {ACM}, address = {New York}, isbn = {978-1-4503-5410-3}, doi = {10.1145/3132787.3132813}, pages = {2}, year = {2017}, abstract = {Mobile live video streaming is becoming an increasingly popular form of interaction both in social media and remote collaboration scenarios. However, in most cases the streamed video does not take mobile devices' spatial data into account (e.g., the viewers do not know the spatial orientation of a streamer), or uses such data only in specific scenarios (e.g., to navigate around a spherical video stream).}, language = {en} } @inproceedings{Herder2001, author = {Herder, Jens}, title = {Applications of Spatial Auditory Displays in the Context of Art and Music}, series = {Human Supervision and Control in Engineering and Music}, booktitle = {Human Supervision and Control in Engineering and Music}, publisher = {Universit{\"a}t Kassel}, address = {Kassel}, year = {2001}, language = {en} } @article{AmanoMatsushitaYanagawaetal.1998, author = {Amano, Katsumi and Matsushita, Fumio and Yanagawa, Hirofumi and Cohen, Michael and Herder, Jens and Martens, William and Koba, Yoshiharu and Tohyama, Mikio}, title = {A Virtual Reality Sound System Using Room-Related Transfer Functions Delivered Through a Multispeaker Array: the PSFC at the University of Aizu Multimedia Center}, series = {TVRSJ}, volume = {3}, journal = {TVRSJ}, number = {1}, publisher = {J-STAGE}, doi = {10.18974/tvrsj.3.1_1}, pages = {1 -- 12}, year = {1998}, abstract = {The PSFC, or Pioneer Sound Field Controller, is a DSP-driven hemispherical loudspeaker array, installed at the University of Aizu Multimedia Center. The PSFC features realtime manipulation of the primary components of sound spatialization for each of two audio sources located in a virtual environment, including the content (apparent direction and distance) and context (room characteristics: reverberation level, room size and liveness).
In an alternate mode, it can also direct the destination of the two separate input signals across 14 loudspeakers, manipulating the direction of the virtual sound sources with no control over apparent distance other than that afforded by source loudness (including no simulated environmental reflections or reverberation). The PSFC speaker dome is about 10 m in diameter, accommodating about fifty simultaneous users, including about twenty users comfortably standing or sitting near its ``sweet spot,'' the area in which the illusions of sound spatialization are most vivid. Collocated with a large screen rear-projection stereographic display, the PSFC is intended for advanced multimedia and virtual reality applications.}, language = {en} } @incollection{HerderMyszkowskiKuniietal.1996, author = {Herder, Jens and Myszkowski, Karol and Kunii, Tosiyasu L. and Ibusuki, Masumi}, title = {A Virtual Reality Interface to an Intelligent Dental Care System}, series = {Medicine Meets Virtual Reality 4}, booktitle = {Medicine Meets Virtual Reality 4}, editor = {Weghorst, Suzanne J. and Sieburg, Hans B. and Morgan, Karen S.}, publisher = {IOS Press}, address = {Amsterdam}, pages = {17 -- 20}, year = {1996}, language = {en} } @inproceedings{IshikawaHiroseHerder1998, author = {Ishikawa, Kimitaka and Hirose, Minefumi and Herder, Jens}, title = {A Sound Spatialization Server for a Speaker Array as an Integrated Part of a Virtual Environment}, series = {IEEE YUFORIC Germany 98}, booktitle = {IEEE YUFORIC Germany 98}, publisher = {IEEE}, address = {Stuttgart}, year = {1998}, abstract = {Spatial sound plays an important role in virtual reality environments, allowing orientation in space, giving a feeling of space, focusing the user on events in the scene, and substituting missing feedback cues (e.g., force feedback). The sound spatialization framework of the University of Aizu, which supports a number of spatialization backends, has been extended to include a sound spatialization server for a multichannel loudspeaker array (Pioneer Sound Field Control System). Our goal is for the spatialization server to allow easy integration into virtual environments. Modeling of distance cues, which are essential for full immersion, is discussed. Furthermore, the integration of this prototype into different applications allowed us to reveal the advantages and problems of spatial sound for virtual reality environments.}, language = {en} } @phdthesis{Herder1999, author = {Herder, Jens}, title = {A Sound Spatialization Resource Management Framework}, publisher = {University of Tsukuba}, address = {Tsukuba}, organization = {University of Tsukuba}, year = {1999}, abstract = {In a virtual reality environment, users are immersed in a scene with objects which might produce sound. The responsibility of a VR environment is to present these objects, but a practical system has only limited resources, including spatialization channels (mixels), MIDI/audio channels, and processing power. A sound spatialization resource manager, introduced in this thesis, controls sound resources and optimizes fidelity (presence) under given conditions, using a priority scheme based on psychoacoustics. Objects which are spatially close together can be coalesced by a novel clustering algorithm, which considers listener localization errors. Application programmers and VR scene designers are freed from the burden of assigning mixels and predicting sound source locations.
The framework includes an abstract interface for sound spatialization backends, an API for the VR environments, and multimedia authoring tools.}, language = {en} } @article{HesseKoenigLogietal.1993, author = {Hesse, Jan and K{\"o}nig, Rainer and Logi, Filippo and Herder, Jens}, title = {A Prototype of an Interface Builder for the Common Lisp Interface Manager - CLIB}, series = {ACM Sigplan Notices}, volume = {28}, journal = {ACM Sigplan Notices}, number = {8}, publisher = {Forschungszentrum Informatik (FZI), Technical Expert Systems and Robotics}, doi = {10.1145/163114.163116}, pages = {19 -- 28}, year = {1993}, abstract = {The Common Lisp Interface Manager (CLIM) is used to develop graphical user interfaces for Lisp-based applications. With the prototype of the CLIM Interface Builder (CLIB), the programmer can generate code for CLIM interactively. The development process becomes faster and less prone to errors. With this new tool, interactive rapid prototyping reduces the costs of the specification phase. Here we present the concept and first results of the prototype of CLIB.}, language = {en} } @inproceedings{PaulHerder2018, author = {Paul, Felix and Herder, Jens}, title = {A model-based filtering approach for real-time human motion data}, series = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, booktitle = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, editor = {Herder, Jens and Geiger, Christian and D{\"o}rner, Ralf and Grimm, Paul}, publisher = {Shaker Verlag}, address = {Herzogenrath}, isbn = {978-3-8440-6215-1}, doi = {10.2370/9783844062151}, pages = {37 -- 44}, year = {2018}, abstract = {Acquiring human motion data from video images plays an important role in the field of computer vision. Ground truth tracking systems require markers to create high-quality motion data. In many applications, however, it is desirable to work without markers. In recent years, affordable hardware for markerless tracking systems has become available at the consumer level. Efficient depth camera systems based on Time-of-Flight sensors and structured light systems have made it possible to record motion data in real time. However, the gap in quality between marker-based and markerless systems remains large. The error sources of a markerless motion tracking pipeline are discussed, and a model-based filter is proposed, which adapts depending on spatial location. The proposed method is shown to be more robust and accurate than the unfiltered data stream and can be used to visually enhance the presence of an actor within a virtual environment in live broadcast productions.}, language = {en} } @inproceedings{VermeegenHerder2018, author = {Vermeegen, Kai and Herder, Jens}, title = {A Lighthouse-based Camera Tracking System for Professional Virtual Studios}, series = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, booktitle = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15.
Workshop der GI-Fachgruppe VR/AR}, editor = {Herder, Jens and Geiger, Christian and D{\"o}rner, Ralf and Grimm, Paul}, publisher = {Shaker Verlag}, address = {Herzogenrath}, isbn = {978-3-8440-6215-1}, doi = {10.2370/9783844062151}, pages = {19 -- 26}, year = {2018}, abstract = {This article describes the possibilities and problems that occur when using the SteamVR tracking 2.0 system as a camera tracking system in a virtual studio and explains an approach for implementation and calibration within a professional studio environment. The tracking system allows for cost-effective deployment. Relevant application fields also include mixed reality recording and the streaming of AR and VR experiences.}, language = {en} } @inproceedings{MartensHerderShiba1999, author = {Martens, William L. and Herder, Jens and Shiba, Yoshiki}, title = {A filtering model for efficient rendering of the spatial image of an occluded virtual sound source}, series = {137th Regular Meeting of the Acoustical Society of America and the 2nd Convention of the European Acoustics Association}, booktitle = {137th Regular Meeting of the Acoustical Society of America and the 2nd Convention of the European Acoustics Association}, publisher = {Acoustical Society of America, European Acoustics Association}, address = {Berlin}, year = {1999}, abstract = {Rendering realistic spatial sound imagery for complex virtual environments must take into account the effects of obstructions such as reflectors and occluders. It is relatively well understood how to calculate the acoustical consequence that would be observed at a given observation point when an acoustically opaque object occludes a sound source. But the interference patterns generated by occluders of various geometries and orientations relative to the virtual source and receiver are computationally intensive to compute if accurate results are required. In many applications, however, it is sufficient to create a spatial image that is recognizable by the human listener as the sound of an occluded source. In the interest of improving audio rendering efficiency, a simplified filtering model was developed and its audio output submitted to psychophysical evaluation. Two perceptually salient components of occluder acoustics were identified that could be directly related to the geometry and orientation of a simple occluder. Actual occluder impulse responses measured in an anechoic chamber resembled the responses of a model incorporating only a variable-duration delay line and a low-pass filter with variable cutoff frequency.}, language = {en} } @article{NovotnyJaenschHerder2005, author = {Novotny, Tom and Jaensch, Kai and Herder, Jens}, title = {A Database Driven and Virtual Reality supported Environment for Marketing Studies}, series = {Journal of the 3D-Forum Society}, volume = {19}, journal = {Journal of the 3D-Forum Society}, number = {4}, pages = {95 -- 101}, year = {2005}, abstract = {In today's market research, multimodal technologies are significant tools for performing flexible and price-efficient studies of not only consumer products but also consumer goods. Current appraisal mechanisms in combination with applied computer graphics can improve the assessment of a product's launch in the very early design phase or during an innovation process. The combination of online questionnaires, Virtual Reality (VR) applications and a database management system offers a powerful tool to let consumers judge products as well as innovative goods even without a single article having been produced.
In this paper we present an approach to consumer goods studies consisting of conventional as well as interactive VR product presentations and online questionnaires, based on a bidirectional database management solution for configuring and managing numerous studies, virtual sets, goods and participants in an effective way to support the evaluation of the received data. Non-programmers can create their test environment, including a VR scenario, quickly and with little effort. With the extensive knowledge of consumer goods, marketing instruments can be defined to shorten and improve the rollout process in the early product stages.}, language = {en} } @article{HerderYamazaki2000, author = {Herder, Jens and Yamazaki, Yasuhiro}, title = {A Chatspace Deploying Spatial Audio for Enhanced Conferencing}, series = {Journal of the 3D-Forum Society}, volume = {15}, journal = {Journal of the 3D-Forum Society}, number = {1}, year = {2000}, language = {en} } @inproceedings{HerderYamazaki2000, author = {Herder, Jens and Yamazaki, Yasuhiro}, title = {A Chatspace Deploying Spatial Audio for Enhanced Conferencing}, series = {Third International Conference on Human and Computer}, booktitle = {Third International Conference on Human and Computer}, publisher = {University of Aizu}, address = {Aizu-Wakamatsu}, pages = {197 -- 202}, year = {2000}, language = {en} } @inproceedings{BaranowskiUtzigFischeretal.2018, author = {Baranowski, Artur and Utzig, Sebastian and Fischer, Philipp and Gerndt, Andreas and Herder, Jens}, title = {3D spacecraft configuration using immersive AR technology}, series = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, booktitle = {Workshop Proceedings / Tagungsband: Virtuelle und Erweiterte Realit{\"a}t - 15. Workshop der GI-Fachgruppe VR/AR}, editor = {Herder, Jens and Geiger, Christian and D{\"o}rner, Ralf and Grimm, Paul}, publisher = {Shaker Verlag}, address = {Herzogenrath}, isbn = {978-3-8440-6215-1}, doi = {10.2370/9783844062151}, pages = {71 -- 82}, year = {2018}, abstract = {In this paper we propose an integrated immersive augmented reality solution for a software tool supporting spacecraft design and verification. The spacecraft design process relies on expertise in many domains, such as thermal and structural engineering. The various subsystems of a spacecraft are highly interdependent and have differing requirements and constraints. In this context, interactive visualizations play an important role in making expert knowledge accessible. Recent immersive display technologies offer new ways of presenting and interacting with computer-generated content. Possibilities and challenges for spacecraft configuration employing these technologies are explored and discussed. A user interface design for an application using the Microsoft HoloLens is proposed. To this end, techniques for selecting a spacecraft component and manipulating its position and orientation in 3D space are developed and evaluated. Thus, advantages and limitations of this approach to spacecraft configuration are revealed and discussed.}, language = {en} }