@inproceedings{HerderCohen1996,
  author    = {Herder, Jens and Cohen, Michael},
  title     = {Design of a Helical Keyboard},
  series    = {ICAD'96 - International Conference on Auditory Display},
  booktitle = {ICAD'96 - International Conference on Auditory Display},
  address   = {Palo Alto},
  year      = {1996},
  abstract  = {Inspired by the cyclical nature of octaves and the helical structure of a scale (Shepard, '82 and '83), we prepared a model of a piano-style keyboard (prototyped in Mathematica), which was then geometrically warped into a left-handed helical configuration, one octave/revolution, pitch mapped to height. The natural orientation of upper-frequency keys higher on the helix suggests a parsimonious left-handed chirality, so that ascending notes cross in front of a typical listener from left to right. Our model is being imported (via the DXF file format) into (Open Inventor/)VRML, where it can be driven by MIDI events, real-time or sequenced. This stream is both synthesized (by a Roland Sound Module) and spatialized by a heterogeneous spatial sound backend (including the Crystal River Engineering Acoustetron II and the Pioneer Sound Field Control speaker-array system), so that the sound of the respective notes is directionalized with respect to sinks, avatars of the human user, located by default in the tube of the helix. This is a work in progress, which we hope will be fully functional within the next few months.},
  language  = {en}
}

@techreport{ChristiansonHerder1995,
  author       = {Christianson, Kiel and Herder, Jens},
  title        = {Mini-lectures in Computer Science on the WWW},
  address      = {Aizu},
  organization = {University of Aizu, Center for Language Research},
  url          = {http://nbn-resolving.de/urn:nbn:de:hbz:due62-opus-854},
  year         = {1995},
  abstract     = {The task of the Center for Language Research is to provide content-based English language instruction for students of computer science and engineering. As such, we find ourselves at the confluence of many of the streams currently running through the English Language Teaching profession, including English for Science and Technology (EST), English for Academic Purposes (EAP), English for Specific Purposes (ESP), computer-assisted language learning (CALL), content-based instruction, and multimedia applications in foreign language pedagogy. This paper describes our initial attempts to construct a number of World Wide Web pages where students will be able to study EST, EAP, and computer science topics on their own in a multimedia environment.},
  language     = {en}
}

@inproceedings{Herder1997a,
  author    = {Herder, Jens},
  title     = {Cooperative Tools for Teaching: An Impact of a Network Environment},
  series    = {Annual Report of the Information Systems and Technology Center, University of Aizu, October 1997},
  booktitle = {Annual Report of the Information Systems and Technology Center, University of Aizu, October 1997},
  address   = {Aizu},
  url       = {http://nbn-resolving.de/urn:nbn:de:hbz:due62-opus-827},
  pages     = {3--8},
  year      = {1997},
  abstract  = {Education at the University of Aizu is focused upon computer science. Besides being the subject matter of many courses, however, the computer also plays a vital role in the educational process itself, both in the distribution of instructional media and in providing students with valuable practical experience. All students have unlimited access (24 hours a day) to individual networked workstations, most of which are multimedia-capable (even video capture is possible in two exercise rooms).
Without software and content tailored for computer-aided instruction, the hardware becomes an expensive decoration. In any case, there is a need to better educate instructors and students in the use of the equipment. In the interest of facilitating effective, collaborative use of network-based computers in teaching, this article explores the impact that a network environment can have on such activities. First, as a general overview, and to examine the motivation for using a network environment in teaching, the article reviews a range of different styles of collaboration. It then shows what kinds of tools are available for use, within the context of what has come to be called Computer-Supported Cooperative Work (CSCW).},
  language  = {en}
}

@inproceedings{Herder1998,
  author    = {Herder, Jens},
  title     = {Sound Spatialization Framework: An Audio Toolkit for Virtual Environments},
  series    = {First International Conference on Human and Computer, Aizu-Wakamatsu, September 1998},
  booktitle = {First International Conference on Human and Computer, Aizu-Wakamatsu, September 1998},
  address   = {Aizu},
  url       = {http://nbn-resolving.de/urn:nbn:de:hbz:due62-opus-788},
  pages     = {6},
  year      = {1998},
  abstract  = {The Sound Spatialization Framework is a C++ toolkit and development environment providing advanced sound spatialization for virtual reality and multimedia applications. It offers many powerful display and user-interface features not found in other sound spatialization software packages, with facilities that go beyond simple sound-source spatialization: visualization and editing of the soundscape, multiple sinks, clustering of sound sources, monitoring and control of resource management, support for various spatialization backends, and classes for MIDI animation and handling.},
  language  = {en}
}

@inproceedings{Herder1997b,
  author    = {Herder, Jens},
  title     = {Tools and widgets for spatial sound authoring},
  series    = {CompuGraphics '97, Sixth International Conference on Computational Graphics and Visualization Techniques: Graphics in the Internet Age, Vilamoura, Portugal},
  booktitle = {CompuGraphics '97, Sixth International Conference on Computational Graphics and Visualization Techniques: Graphics in the Internet Age, Vilamoura, Portugal},
  address   = {Portugal},
  isbn      = {972-8342-02-0},
  url       = {http://nbn-resolving.de/urn:nbn:de:hbz:due62-opus-896},
  pages     = {87--95},
  year      = {1997},
  abstract  = {Broader use of virtual reality environments and sophisticated animations spawns a need for spatial sound. Until now, spatial sound design has been based very much on experience and on trial and error. Most effects are hand-crafted, because good design tools for spatial sound do not exist. This paper discusses spatial sound authoring and its applications, including shared virtual reality environments based on VRML. New utilities introduced by this research are an inspector for sound sources, an interactive resource manager, and a visual soundscape manipulator. The tools are part of a sound spatialization framework and allow a designer/author of multimedia content to monitor and debug sound events. Resource constraints, such as a limited number of sound spatialization channels, can also be simulated.},
  language  = {en}
}

@incollection{DavinHerder2021,
  author    = {Davin, Till and Herder, Jens},
  title     = {Real-Time Relighting of Video Streams for Augmented Virtuality Scenes},
  series    = {GI VR / AR Workshop},
  booktitle = {GI VR / AR Workshop},
Gesellschaft f{\"u}r Informatik e.V.}, editor = {Weier, Martin and Bues, Matthias and Wechner, Reto}, publisher = {Gesellschaft f{\"u}r Informatik e.V. (GI)}, address = {Bonn}, doi = {10.18420/vrar2021_6}, publisher = {Hochschule D{\"u}sseldorf}, pages = {16}, year = {2021}, language = {en} } @inproceedings{HonsbrokMostafawyHerderetal.2025, author = {Honsbrok, Jan and Mostafawy, Sina and Herder, Jens and Huldtgren, Alina}, title = {Ray-LUT: A Lookup-Based Method for Camera Lens Simulation in Real-Time Using Ray Tracing}, series = {Proceedings of the 20th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications}, booktitle = {Proceedings of the 20th International Joint Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications}, publisher = {SciTePress}, doi = {10.5220/0013105900003912}, pages = {177 -- 184}, year = {2025}, abstract = {Lens systems have a major influence on the image due to effects such as depth of field or optical aberrations. The only method to simulate these effects precisely is to trace rays through an actual lens system. This provides accurate results, but only with high computational effort. To speed up the ray tracing through the lens system, various acceleration methods have been developed, requiring considerable precomputations. We present a new method based on the Realistic Camera by Kolb et. al.. Instead of tracing each ray through the lens system, the rays are precomputed once and stored in a lookup table. In contrast to other methods, our method is simple, and does not require substantial preprocessing upfront. We can simulate complex effects such as chromatic aberrations accurately in real-time, regardless the number of lens surfaces in the system. Our method achieves the same performance as state-of-the-art methods like Polynomial Optics, while maintaining the same number of samples per pixel.}, language = {en} } @inproceedings{HerderNeiderKinuwaki2007, author = {Herder, Jens and Neider, Christian and Kinuwaki, Shinichi}, title = {HDR-based lighting estimation for virtual studio (TV) environments}, series = {10th International Conference on Human and Computer}, booktitle = {10th International Conference on Human and Computer}, address = {D{\"u}sseldorf, Aizu-Wakamatsu}, doi = {10.20385/opus4-1666}, url = {http://nbn-resolving.de/urn:nbn:de:hbz:due62-opus-16665}, pages = {111 -- 117}, year = {2007}, abstract = {Two high dynamic range HDR environments maps based on video streams from fish-eye lens cameras are used for generating virtual lights in a virtual set renderer. The task of realistic virtual light setup of scenes using captured environment maps might be eased as well as visual quality improves. We discuss the light setting problem for virtual studio tv productions which have mixed scenes of real objects, actors, virtual objects and virtual backgrounds. Benefits of hdr interactive light control are that the real light in the studio does not have to be remodeled and the artistic impression by using the light in the studio is also captured. An analysis of system requirements identifies technical challenges. 
We discuss the properties of a prototype system, including a test production.},
  language  = {en}
}

@incollection{MichaelisHerder2025,
  author    = {Michaelis, Maja and Herder, Jens},
  title     = {Augmented Reality for Lighting Adjustment in a Virtual Studio},
  series    = {Augmented Reality - Situated Spatial Synergy [Working Title]},
  booktitle = {Augmented Reality - Situated Spatial Synergy [Working Title]},
  editor    = {Cohen, Michael},
  publisher = {IntechOpen},
  address   = {London},
  doi       = {10.5772/intechopen.1012424},
  year      = {2025},
  abstract  = {An interactive virtual studio is used for live broadcasts with live interaction in front of the recording system. Advances in Augmented Reality (AR) using video see-through Head-Mounted Displays (HMDs) also make it possible to improve the production process itself: lighting can be adjusted using AR while its effects are seen live. An Augmented Reality Lighting Adjustment System (ARLAS) for a virtual studio has been developed and evaluated. The traditional process for controlling lighting in a virtual studio is complicated and time-consuming; it often requires several people to control many different components at different locations. Using AR, this process is simplified by integrating the relevant programs and video streams into a single application on an HMD, allowing direct control at the lighting locations. Changes to settings are immediately visible on the real fixtures. Real lights can be replicated virtually to maintain consistent lighting conditions in virtual scenes, ensuring that virtual objects are lit as they would be in a real studio. The prototype demonstrated the benefits of AR for complex virtual studio setups and was evaluated by 18 participants.},
  subject   = {Erweiterte Realit{\"a}t (Informatik)},
  language  = {en}
}