@incollection{HerderJaenschGarbe2006, author = {Herder, Jens and Jaensch, Kai and Garbe, Katharina}, title = {Haptische Interaktionen in Testumgebungen f{\"u}r Produktpr{\"a}sentation in Virtuellen Umgebungen}, series = {Augmented and Virtual Reality in der Produktentstehung}, volume = {188}, booktitle = {Augmented and Virtual Reality in der Produktentstehung}, editor = {Gausemeier, J{\"u}rgen and Grafe, Michael}, publisher = {Heinz Nixdorf Institut, Universit{\"a}t Paderborn}, address = {Paderborn}, isbn = {3-939350-07-9}, pages = {87 -- 99}, year = {2006}, abstract = {Durch den vermehrten Einsatz von multimedialen Technologien werden in der Marktforschung die M{\"o}glichkeiten der Durchf{\"u}hrung flexibler und kosteng{\"u}nstiger Studien gegeben. In sehr fr{\"u}hen Phasen des Innovationsprozesses als Teil der Marktforschung k{\"o}nnen durch Einsatz von Virtuellen Umgebungen die Markteinf{\"u}hrungskonzepte f{\"u}r neue Produkte getestet werden. Mittels Anwendungen der Virtuellen Realit{\"a}t k{\"o}nnen neue Produkte einschlie{\ss}lich des Marketingkonzeptes auch haptisch getestet werden, ohne dass dieses Produkt bereits physisch vorhanden sein muss. Informationen werden dem Benutzer in Virtuellen Umgebungen haupts{\"a}chlich visuell und erg{\"a}nzend auditiv {\"u}bermittelt. Verbreitete Benutzerschnittstellen sind Interaktionsger{\"a}te wie Stylus und Wand. Durch die haptische Wahrnehmung werden Informationen menschengerechter, effektiver und intuitiver wahrgenommen. Objekte in einer virtuellen Umgebung k{\"o}nnen durch den Einsatz haptischer Interaktionsger{\"a}te ertastet und erf{\"u}hlt werden und machen dadurch eine differenziertere Beurteilung und Einsch{\"a}tzung eben dieser Objekte durch den Benutzer m{\"o}glich. Der Fokus des vorliegenden Projektes liegt daher auf der interaktiven haptischen Produktpr{\"a}sentation in einer virtuellen Einkaufsumgebung, die in Online-Befragungen mit zus{\"a}tzlichen Werbefilmen eingebettet ist. Als Nebenprodukt wurde das Werkzeug Open Inventor um Knoten zur Modellierung von haptischen Szeneneigenschaften erweitert.}, language = {de} } @inproceedings{HerderBrosdaDjuderijaetal.2011, author = {Herder, Jens and Brosda, Constantin and Djuderija, Sascha and Drochtert, Daniel and Genc, {\"O}mer and Joeres, Stephan and Kellerberg, Patrick and Looschen, Simon and Geiger, Christian and W{\"o}ldecke, Bj{\"o}rn}, title = {TouchPlanVS - A Tangible Multitouch Planning System for Virtual TV Studio Productions}, series = {2011 IEEE Symposium on 3D User Interfaces (3DUI)}, booktitle = {2011 IEEE Symposium on 3D User Interfaces (3DUI)}, publisher = {IEEE}, address = {Singapore}, isbn = {978-1-4577-0064-4}, doi = {10.1109/3DUI.2011.5759226}, pages = {103 -- 104}, year = {2011}, abstract = {This article presents a new approach to integrating tangible user feedback into today's virtual TV studio productions. We describe a tangible multitouch planning system that enables multiple users to prepare and customize scene flow and settings. Users can collaboratively view and interact with virtual objects through a tangible user interface on a shared multitouch surface. The TV scenes created in the 2D setting are simultaneously rendered in 3D on an external monitor using a production/target renderer, giving the user a closer impression of the final production. Subsequently, users are able to join the scenes together into one complex plot.
Within the development process, a video prototype of the system demonstrates the user interaction and enables early reviews and evaluations. The requirements analysis is based on expert interviews.}, language = {en} } @inproceedings{GarbeHerbstHerder2007, author = {Garbe, Katharina and Herbst, Iris and Herder, Jens}, title = {Spatial Audio for Augmented Reality}, series = {10th International Conference on Human and Computer}, booktitle = {10th International Conference on Human and Computer}, address = {D{\"u}sseldorf, Aizu-Wakamatsu}, pages = {53 -- 58}, year = {2007}, abstract = {Using spatial audio successfully for augmented reality (AR) applications is a challenge, but is rewarded with an improved user experience. Thus, we have extended the AR/VR framework {\sc Morgan} with spatial audio to improve users' orientation in an AR application. In this paper, we investigate the users' capability to localize and memorize spatial sounds (registered with virtual or real objects). We discuss two scenarios. In the first scenario, the user only localizes sound sources; in the second scenario, the user memorizes the location of audio-visual objects. Our results reflect spatial audio performance within the application domain and show which technology pitfalls still exist. Finally, we provide design recommendations for spatial audio AR environments.}, language = {en} } @inproceedings{AytenHerderVonolfen2010, author = {Ayten, H{\"u}seyin and Herder, Jens and Vonolfen, Wolfgang}, title = {Visual Acceptance Evaluation of Soft Shadow Algorithms for Virtual TV Studios}, series = {HC '10 Proceedings of the 13th International Conference on Humans and Computers}, booktitle = {HC '10 Proceedings of the 13th International Conference on Humans and Computers}, publisher = {University of Aizu Press}, address = {Aizu-Wakamatsu}, pages = {66 -- 71}, year = {2010}, abstract = {Shadows in computer graphics are an important rendering aspect for spatial objects. For real-time computer applications such as games, it is essential to represent shadows as accurately as possible. Also, various TV stations work with virtual studio systems instead of real studio sets. Especially for those systems, a realistic impression of the rendered and mixed scene is important. One challenge is therefore the creation of a natural shadow impression. This paper presents the results of an empirical study comparing the performance and quality of different shadow mapping methods. For this test, a prototype studio renderer was developed. A percentage-closer filter (PCF) with a number of specific resolutions is used to minimize the aliasing issue. More advanced algorithms that generate smooth shadows, namely the percentage-closer soft shadows (PCSS) method as well as the variance shadow maps (VSM) method, are analysed. Different open-source APIs are used to develop the virtual studio renderer, giving the benefit of permanent enhancement. The Ogre 3D graphics engine is used to implement the rendering system, benefiting from various functions and plugins. The transmission of the tracking data is accomplished with the VRPN server/client and the Intersense API. The different shadow algorithms are compared in a virtual studio environment that also casts real shadows and thus allows a direct comparison throughout the empirical user study.
The performance is measured in frames per second.}, language = {en} } @misc{OPUS4-1555, title = {{\"U}bersicht aller Amtliche Mitteilungen - Verk{\"u}ndungsbl{\"a}tter der Hochschule D{\"u}sseldorf (ehemals Fachhochschule D{\"u}sseldorf)}, address = {D{\"u}sseldorf}, organization = {Hochschule D{\"u}sseldorf}, abstract = {Die Amtlichen Mitteilungen - Verk{\"u}ndungsbl{\"a}tter der Hochschule D{\"u}sseldorf geh{\"o}ren zur Hochschulbibliografie der HSD.}, language = {de} }