@article{AmanoMatsushitaYanagawaetal.1998, author = {Amano, Katsumi and Matsushita, Fumio and Yanagawa, Hirofumi and Cohen, Michael and Herder, Jens and Martens, William and Koba, Yoshiharu and Tohyama, Mikio}, title = {A Virtual Reality Sound System Using Room-Related Transfer Functions Delivered Through a Multispeaker Array: the PSFC at the University of Aizu Multimedia Center}, series = {TVRSJ}, volume = {3}, journal = {TVRSJ}, number = {1}, publisher = {J-STAGE}, doi = {10.18974/tvrsj.3.1_1}, pages = {1 -- 12}, year = {1998}, abstract = {The PSFC, or Pioneer Sound Field Controller, is a DSP-driven hemispherical loudspeaker array, installed at the University of Aizu Multimedia Center. The PSFC features realtime manipulation of the primary components of sound spatialization for each of two audio sources located in a virtual environment, including the content (apparent direction and distance) and context (room characteristics: reverberation level, room size and liveness). In an alternate mode, it can also direct the destination of the two separate input signals across 14 loudspeakers, manipulating the direction of the virtual sound sources with no control over apparent distance other than that afforded by source loudness (including no simulated environmental reflections or reverberation). The PSFC speaker dome is about 10 m in diameter, accommodating about fifty simultaneous users, including about twenty users comfortably standing or sitting near its ``sweet spot,'' the area in which the illusions of sound spatialization are most vivid. Collocated with a large screen rear-projection stereographic display, the PSFC is intended for advanced multimedia and virtual reality applications.}, language = {en} } @article{CohenHerderLMartens1999, author = {Cohen, Michael and Herder, Jens and Martens, William L.}, title = {Cyberspatial Audio Technology}, series = {The Journal of the Acoustical Society of Japan (E)}, volume = {20}, journal = {The Journal of the Acoustical Society of Japan (E)}, number = {6}, doi = {10.1250/ast.20.389}, pages = {389 -- 395}, year = {1999}, abstract = {Cyberspatial audio applications are distinguished from the broad range of spatial audio applications in a number of important ways that help to focus this review. Most significant is that cyberspatial audio is most often designed to be responsive to user inputs. In contrast to non-interactive auditory displays, cyberspatial auditory displays typically allow active exploration of the virtual environment in which users find themselves. Thus, at least some portion of the audio presented in a cyberspatial environment must be selected, processed, or otherwise rendered with minimum delay relative to user input.
Besides the technological demands associated with realtime delivery of spatialized sound, the type and quality of auditory experiences supported are also very different from those associated with displays that support stationary sound localization.}, language = {en} } @article{DaemenHerderKochetal.2017, author = {Daemen, Jeff and Herder, Jens and Koch, Cornelius and Ladwig, Philipp and Wiche, Roman and Wilgen, Kai}, title = {Halbautomatische Steuerung von Kamera und Bildmischer bei Live-{\"U}bertragungen}, series = {Fachzeitschrift f{\"u}r Fernsehen, Film und Elektronische Medien}, journal = {Fachzeitschrift f{\"u}r Fernsehen, Film und Elektronische Medien}, number = {11}, publisher = {Schiele \& Sch{\"o}n}, pages = {501 -- 505}, year = {2017}, abstract = {Live video broadcasting with multiple cameras requires a wide range of expertise. Robotic systems allow common, repeated tracking shots to be automated, but they do not permit short-notice adjustments in response to unforeseeable events. This article introduces a modular, automated camera-control and vision-mixing system based on fundamental cinematographic rules. The positions of the actors are provided by a markerless tracking system. In addition, the audio levels of the actors' lavalier microphones are used to analyze the current scene. An expert system determines suitable camera angles and decides when to switch from one camera to another. A test production was carried out to observe the developed prototype in a live broadcast scenario and served as a video demonstration for an evaluation.}, language = {de} } @article{GriesertWalczakHerder2003, author = {Griesert, Arnfried and Walczak, Oliver and Herder, Jens}, title = {Interactive Realtime Terrain Visualization for Virtual Set Applications}, series = {Journal of the 3D-Forum Society}, volume = {17}, journal = {Journal of the 3D-Forum Society}, number = {4}, pages = {20 -- 26}, year = {2003}, abstract = {Virtual set environments for broadcasting are becoming more sophisticated, and their visual quality is improving. Realtime interaction and production-specific visualization, implemented through a plugin mechanism, enhance existing systems such as the virtual studio software 3DK. This work presents an algorithm which dynamically manages high-resolution textures by prefetching them into memory as they are needed and maps them onto a procedural mesh in realtime. The main target application of this work is the virtual representation of a flight over a landscape as part of weather reports in virtual studios, together with interaction by the moderator.}, language = {en} } @article{Herder1998, author = {Herder, Jens}, title = {Sound Spatialization Framework: An Audio Toolkit for Virtual Environments}, series = {Journal of the 3D-Forum Society}, volume = {12}, journal = {Journal of the 3D-Forum Society}, number = {3}, pages = {17 -- 22}, year = {1998}, abstract = {The Sound Spatialization Framework is a C++ toolkit and development environment for providing advanced sound spatialization for virtual reality and multimedia applications. The Sound Spatialization Framework provides many powerful display and user-interface features not found in other sound spatialization software packages.
It provides facilities that go beyond simple sound source spatialization: visualization and editing of the soundscape, multiple sinks, clustering of sound sources, monitoring and controlling resource management, support for various spatialization backends, and classes for MIDI animation and handling. Keywords: sound spatialization, resource management, virtual environments, spatial sound authoring, user interface design, human-machine interfaces}, language = {en} } @article{Herder2001, author = {Herder, Jens}, title = {Interactive Content Creation with Virtual Set Environments}, series = {Journal of the 3D-Forum Society}, volume = {15}, journal = {Journal of the 3D-Forum Society}, number = {4}, pages = {53 -- 56}, year = {2001}, abstract = {Digital broadcasting enables interactive TV studios with virtual set environments. This presentation reviews current technology and describes the requirements for such systems. Interoperability across the production, streaming, and viewer levels requires open interfaces. As the technology allows more interaction, it becomes inherently difficult to control the quality of the viewer's experience.}, language = {en} } @article{Herder1999, author = {Herder, Jens}, title = {Visualization of a Clustering Algorithm of Sound Sources based on Localization Errors}, series = {Journal of the 3D-Forum Society}, volume = {13}, journal = {Journal of the 3D-Forum Society}, number = {3}, pages = {66 -- 70}, year = {1999}, abstract = {A module for soundscape monitoring and visualizing resource management processes was extended for presenting clusters, generated by a novel sound source clustering algorithm. This algorithm groups multiple sound sources together into a single representative source, considering localization errors depending on listener orientation. Localization errors are visualized for each cluster using resolution cones. Visualization is done at runtime and allows understanding and evaluation of the clustering algorithm.}, language = {en} } @article{Herder1998a, author = {Herder, Jens}, title = {Tools and Widgets for Spatial Sound Authoring}, series = {Computer Networks \& ISDN Systems}, volume = {30}, journal = {Computer Networks \& ISDN Systems}, number = {20-21}, publisher = {Elsevier}, pages = {1933 -- 1940}, year = {1998}, language = {en} } @article{HerderCohen2002, author = {Herder, Jens and Cohen, Michael}, title = {The Helical Keyboard: Perspectives for Spatial Auditory Displays and Visual Music}, series = {Journal of New Music Research}, volume = {31}, journal = {Journal of New Music Research}, number = {3}, pages = {269 -- 281}, year = {2002}, abstract = {Auditory displays with the ability to dynamically spatialize virtual sound sources under real-time conditions enable advanced applications for art and music. A listener can be deeply immersed while interacting and participating in the experience. We review some of those applications while focusing on the Helical Keyboard project and discussing the required technology. Inspired by the cyclical nature of octaves and the helical structure of a scale, a model of a piano-style keyboard was prepared, which was then geometrically warped into a helicoidal configuration, one octave per revolution, with pitch mapped to height and chroma. It can be driven by MIDI events, real-time or sequenced, whose stream is both synthesized and spatialized by a spatial sound display. The sound of the respective notes is spatialized with respect to sinks, avatars of the human user, by default in the tube of the helix.
Alternative coloring schemes can be applied, including a color map compatible with chromastereoptic eyewear. The graphical display animates polygons, interpolating between the notes of a chord across the tube of the helix. Recognition of simple chords allows directionalization of all the notes of a major triad from the position of its musical root. The system is designed to allow, for instance, separate audition of harmony and melody, commonly played by the left and right hands, respectively, on a normal keyboard. Perhaps the most exotic feature of the interface is the ability to fork one's presence, replicating subject instead of object by installing multiple sinks at arbitrary places around a virtual scene so that, for example, harmony and melody can be separately spatialized, using two heads to normalize the octave; such a technique effectively doubles the helix from the perspective of a single listener. Rather than a symmetric arrangement of the individual helices, they are perceptually superimposed in-phase, co-extensively, so that corresponding notes in different registers are at the same azimuth.}, language = {en} } @article{HerderWoerzbergerTwelkeretal.2002, author = {Herder, Jens and W{\"o}rzberger, Ralf and Twelker, Uwe and Albertz, Stefan}, title = {Use of Virtual Environments in the Promotion and Evaluation of Architectural Designs}, series = {Journal of the 3D-Forum Society}, volume = {16}, journal = {Journal of the 3D-Forum Society}, number = {4}, pages = {117 -- 122}, year = {2002}, abstract = {Virtual environments can create a realistic impression of an architectural space during the architectural design process, providing a powerful tool for evaluation and promotion during a project's early stages. In comparison to pre-rendered animations, such as walkthroughs based on CAD models, virtual environments can offer intuitive interaction and a more lifelike experience. Advanced virtual environments allow users to change realtime rendering features with a few manipulations, switching between different versions while still maintaining sensory immersion. This paper reports on an experimental project in which architectural models are being integrated into interactive virtual environments, and includes demonstrations of both the possibilities and limitations of such applications in evaluating, presenting and promoting architectural designs.}, language = {en} } @article{HerderYamazaki2000, author = {Herder, Jens and Yamazaki, Yasuhiro}, title = {A Chatspace Deploying Spatial Audio for Enhanced Conferencing}, series = {Journal of the 3D-Forum Society}, volume = {15}, journal = {Journal of the 3D-Forum Society}, number = {1}, year = {2000}, language = {en} } @article{HesseKoenigLogietal.1993, author = {Hesse, Jan and K{\"o}nig, Rainer and Logi, Filippo and Herder, Jens}, title = {A Prototype of an Interface Builder for the Common Lisp Interface Manager - CLIB}, series = {ACM SIGPLAN Notices}, volume = {28}, journal = {ACM SIGPLAN Notices}, number = {8}, publisher = {Forschungszentrum Informatik (FZI), Technical Expert Systems and Robotics}, doi = {10.1145/163114.163116}, pages = {19 -- 28}, year = {1993}, abstract = {The Common Lisp Interface Manager (CLIM) is used to develop graphical user interfaces for Lisp-based applications. With the prototype of the CLIM Interface Builder (CLIB), the programmer can generate code for CLIM interactively. The development process becomes faster and less prone to errors.
With this new tool, interactive rapid prototyping reduces the costs of the specification phase. Here we present the concept and first results of the CLIB prototype.}, language = {en} } @article{HonnoSuzukiHerder2000, author = {Honno, Kuniaki and Suzuki, Kenji and Herder, Jens}, title = {Distance and Room Effects Control for the PSFC, an Auditory Display using a Loudspeaker Array}, series = {Journal of the 3D-Forum Society}, volume = {14}, journal = {Journal of the 3D-Forum Society}, number = {4}, pages = {146 -- 151}, year = {2000}, abstract = {The Pioneer Sound Field Controller (PSFC), a loudspeaker array system, features realtime configuration of an entire sound field, including sound source direction, virtual distance, and context of the simulated environment (room characteristics: room size and liveness) for each of two sound sources. In the PSFC system, there is no native parameter to specify the distance between the sound source and the sound sink (listener) and also no function to control it directly. This paper suggests a method to control virtual distance using basic parameters: volume, room size and liveness. The implementation of distance cues is an important aspect of 3D sound. Virtual environments supporting room effects like reverberation not only gain realism but also provide additional information to users about the surrounding space. The context switch of different aural attributes is done by using an API of the Sound Spatialization Framework. Therefore, when the sound sink moves through two rooms, such as a small bathroom and a large living room, the context of the sink switches and a different sound is obtained.}, language = {en} } @article{JuttnerHerder2006, author = {Juttner, Carsten and Herder, Jens}, title = {Lighting an Interactive Scene in Real-time with a GPU and Video Textures}, series = {Journal of the 3D-Forum Society}, volume = {20}, journal = {Journal of the 3D-Forum Society}, number = {1}, pages = {22 -- 28}, year = {2006}, abstract = {The presentation of virtual environments in real time has always been a demanding task. Specially designed graphics hardware is necessary to deal with the large amounts of data these applications typically produce. For several years the chipsets that were used allowed only simple lighting models and fixed algorithms. But recent development has produced new graphics processing units (GPUs) that are much faster and more programmable than their predecessors. This paper presents an approach to take advantage of these new features. It uses a video texture as part of the lighting calculations for the passenger compartment of a virtual train and was run on the GPU of a recent PC graphics card. The task was to map the varying illumination of a filmed landscape onto the virtual objects and also onto another video texture (showing two passengers), thereby enhancing the realism of the scene.}, language = {en} } @article{KuniiHerderMyszkowskietal.1994, author = {Kunii, Tosiyasu L. and Herder, Jens and Myszkowski, Karol and Okunev, Oleg and Okuneva, Galina and Ibusuki, Masumi}, title = {Articulation Simulation for an Intelligent Dental Care System}, series = {Displays}, volume = {15}, journal = {Displays}, number = {3}, pages = {181 -- 188}, year = {1994}, abstract = {CAD/CAM techniques are used increasingly in dentistry for design and fabrication of teeth restorations. An important issue is preserving occlusal contacts of teeth after restoration.
Traditional techniques based on the use of casts with mechanical articulators require manual adjustment of the occlusal surface, which becomes impractical when hard restoration materials like porcelain are used; they are also time- and labor-consuming. Most existing computer systems ignore such an articulation check completely, or perform the check at the level of a tooth and its immediate neighbors. We present a new mathematical model and a related user interface for global articulation simulation, developed for the Intelligent Dental Care System project. The aim of the simulation is to eliminate the use of mechanical articulators and manual adjustment in the process of designing dental restorations and in articulation diagnostics. The mathematical model is based upon differential topological modeling of the jaws considered as a mechanical system. The user interface exploits metaphors that are familiar to dentists from everyday practice. A new input device designed specifically for use with articulation simulation is proposed.}, language = {en} } @article{MartensHerder1999, author = {Martens, William L. and Herder, Jens}, title = {Perceptual criteria for eliminating reflectors and occluders for efficient rendering of environmental sound}, series = {The Journal of the Acoustical Society of America}, volume = {105}, journal = {The Journal of the Acoustical Society of America}, number = {2}, doi = {10.1121/1.425349}, pages = {979}, year = {1999}, language = {en} } @article{MartensHerderShiba1999, author = {Martens, William L. and Herder, Jens and Shiba, Yoshiki}, title = {A filtering model for efficient rendering of the spatial image of an occluded virtual sound source}, series = {The Journal of the Acoustical Society of America}, volume = {105}, journal = {The Journal of the Acoustical Society of America}, number = {2}, doi = {10.1121/1.425354}, pages = {980}, year = {1999}, abstract = {Rendering realistic spatial sound imagery for complex virtual environments must take into account the effects of obstructions such as reflectors and occluders. It is relatively well understood how to calculate the acoustical consequence that would be observed at a given observation point when an acoustically opaque object occludes a sound source. But the interference patterns generated by occluders of various geometries and orientations relative to the virtual source and receiver are computationally intense if accurate results are required. In many applications, however, it is sufficient to create a spatial image that is recognizable by the human listener as the sound of an occluded source. In the interest of improving audio rendering efficiency, a simplified filtering model was developed and its audio output submitted to psychophysical evaluation. Two perceptually salient components of occluder acoustics were identified that could be directly related to the geometry and orientation of a simple occluder.
Actual occluder impulse responses measured in an anechoic chamber resembled the responses of a model incorporating only a variable-duration delay line and a low-pass filter with variable cutoff frequency.}, language = {en} } @article{NovotnyJaenschHerder2005, author = {Novotny, Tom and Jaensch, Kai and Herder, Jens}, title = {A Database Driven and Virtual Reality supported Environment for Marketing Studies}, series = {Journal of the 3D-Forum Society}, volume = {19}, journal = {Journal of the 3D-Forum Society}, number = {4}, pages = {95 -- 101}, year = {2005}, abstract = {In today's market research, multimodal technologies are significant tools for performing flexible and cost-efficient studies not only of consumer products but also of consumer goods. Current appraisal mechanisms, in combination with applied computer graphics, can improve the assessment of a product's launch in the very early design phase or in an innovation process. The combination of online questionnaires, Virtual Reality (VR) applications and a database management system offers a powerful tool that lets a consumer judge products as well as innovative goods even before a single article has been produced. In this paper we present an approach to consumer goods studies consisting of conventional as well as interactive VR product presentations and online questionnaires, based on a bidirectional database management solution, to configure and manage numerous studies, virtual sets, goods and participants in an effective way and to support the evaluation of the collected data. Non-programmers can create their test environment, including a VR scenario, very quickly and without any effort. With extensive knowledge of consumer goods, marketing instruments can be defined to shorten and improve the rollout process in the early product stages.}, language = {en} } @article{RyskeldievCohenHerder2018, author = {Ryskeldiev, Bektur and Cohen, Michael and Herder, Jens}, title = {StreamSpace: Pervasive Mixed Reality Telepresence for Remote Collaboration on Mobile Devices}, series = {Journal of Information Processing}, volume = {26}, journal = {Journal of Information Processing}, publisher = {J-STAGE}, doi = {10.2197/ipsjjip.26.177}, pages = {177 -- 185}, year = {2018}, abstract = {We present a system that exploits mobile rotational tracking and photospherical imagery to allow users to share their environment with remotely connected peers "on the go." We surveyed related interfaces and developed a unique groupware application that shares a mixed reality space with spatially-oriented live video feeds. Users can collaborate through realtime audio, video, and drawings in a virtual space. The developed system was tested in a preliminary user study, which confirmed an increase in spatial and situational awareness among viewers as well as a reduction in cognitive workload.
Believing that our system provides a novel style of collaboration in mixed reality environments, we discuss future applications and extensions of our prototype.}, subject = {Ubiquitous Computing}, language = {en} } @article{StruchholzHerderLeckschat2006, author = {Struchholz, Holger and Herder, Jens and Leckschat, Dieter}, title = {Sound radiation simulation of musical instruments based on interpolation and filtering of multi-channel recordings}, series = {Journal of the 3D-Forum Society}, volume = {20}, journal = {Journal of the 3D-Forum Society}, number = {1}, pages = {41 -- 47}, year = {2006}, abstract = {With the virtual environment developed here, the characteristic sound radiation patterns of musical instruments can be experienced in real-time. The user may freely move around a musical instrument, thereby receiving acoustic and visual feedback in real-time. The perception of auditory and visual effects is intensified by the combination of acoustic and visual elements, as well as the option of user interaction. The simulation of characteristic sound radiation patterns is based on interpolating the intensities of a multichannel recording and offers a near-natural mapping of the sound radiation patterns. Additionally, a simple filter has been developed, enabling the qualitative simulation of an instrument's characteristic sound radiation patterns to be easily implemented within real-time 3D applications. Both methods of simulating sound radiation patterns have been evaluated for a saxophone with respect to their functionality and validity by means of spectral analysis and an auditory experiment.}, language = {en} }