@inproceedings{KreimeierKargGoetzelmann2020,
  author = {Kreimeier, Julian and Karg, Pascal and G{\"o}tzelmann, Timo},
  title = {BlindWalkVR},
  series = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments},
  booktitle = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments},
  publisher = {ACM},
  address = {New York, NY, USA},
  doi = {10.1145/3389189.3389193},
  pages = {1 -- 8},
  year = {2020},
  abstract = {Virtual Reality (VR) promises expanded access to spatial information, especially for blind and visually impaired people. Through haptic and acoustic feedback, limitations of the real world, such as the risk of injury or the need for a sighted safety assistant, can be circumvented. However, to exploit the full potential of this technology, interactive locomotion in large virtual environments is required in order to overcome real-world space constraints. Thus, we present formative insights into blind people's egocentric VR locomotion by comparing four different implementations (i.e., two VR treadmills, trackers on the ankles, and joystick-based locomotion) in a qualitative and quantitative user study with seven blind and visually impaired participants. Our results reveal novel insights into the characteristics of each implementation in terms of usability and practicability, and also provide recommendations for further work in this field with the target user group in mind.},
  language = {en}
}

@inproceedings{GoetzelmannEichler2015,
  author = {G{\"o}tzelmann, Timo and Eichler, Laura},
  title = {BlindWeb Maps -- An Interactive Web Service for the Selection and Generation of Personalized Audio-Tactile Maps},
  series = {Proceedings of the 15th International Conference on Computers Helping People with Special Needs},
  booktitle = {Proceedings of the 15th International Conference on Computers Helping People with Special Needs},
  publisher = {Springer},
  address = {Cham},
  isbn = {978-3-319-41266-5},
  pages = {139 -- 145},
  year = {2016},
  abstract = {Tactile maps may contribute to the orientation of blind people or alternatively be used for navigation. In the past, the generation of these maps was a manual task, which considerably limited their availability. Nowadays, similar to visual maps, tactile maps can also be generated semi-automatically by tools and web services. Existing approaches enable users to generate maps by entering a specific address or point of interest. This can in principle be done by a blind user. However, these approaches actually show an image of the map on the user's display, which cannot be read by screen readers. Consequently, the blind user does not know what is on the map before it is printed. Ideally, the map selection process should give the user more information and freedom to select the desired excerpt. This paper introduces a novel web service for blind people to interactively select and automatically generate tactile maps. It adapts the interaction concept for map selection to the requirements of blind users whilst supporting multiple printing technologies. The integrated audio review of the map's contents provides early feedback on whether the currently selected map excerpt corresponds to the desired information need. Changes can be initiated before the map is printed, which, especially for 3D printing, saves considerable time. The user is able to select the map features to be included in the tactile map.
Furthermore, the map rendering can be adapted to different zoom levels. Finally, an evaluation with blind users was used to refine our approach.},
  language = {en}
}

@inproceedings{KargKreimeierGoetzelmann2021,
  author = {Karg, Pascal and Kreimeier, Julian and G{\"o}tzelmann, Timo},
  title = {Build-and-Touch: A Low-Cost, DIY, Open-Source Approach Towards Touchable Virtual Reality},
  series = {Proceedings of the 14th PErvasive Technologies Related to Assistive Environments Conference},
  booktitle = {Proceedings of the 14th PErvasive Technologies Related to Assistive Environments Conference},
  publisher = {ACM},
  address = {New York, NY, USA},
  doi = {10.1145/3453892.3462217},
  pages = {258 -- 259},
  year = {2021},
  abstract = {Virtual Reality (VR) is attracting more and more attention in academic research and practical application thanks to the current availability of low-cost and end-user-friendly devices. In terms of haptic (rather than visual) interaction, however, this technology is still in its infancy, and there are few devices that are inexpensive and technologically simple to operate and to procure. In this context, we present a concept of how haptic interaction with VR data gloves can succeed by means of a commercially available webcam, sophisticated tracking software, and homemade low-cost hardware. All hardware and software components can be obtained inexpensively or are open source, in order to achieve the greatest possible dissemination potential. With this work, we intend to provide an important impetus for future improvements and dissemination in terms of both technology and areas of application.},
  language = {en}
}

@inproceedings{GoetzelmannSchneider2016,
  author = {G{\"o}tzelmann, Timo and Schneider, Daniel},
  title = {CapCodes: Capacitive 3D Printable Identification and On-screen Tracking for Tangible Interaction},
  series = {NordiCHI '16: Proceedings of the 9th Nordic Conference on Human-Computer Interaction},
  booktitle = {NordiCHI '16: Proceedings of the 9th Nordic Conference on Human-Computer Interaction},
  publisher = {ACM},
  address = {New York, NY, USA},
  isbn = {978-1-4503-4763-1},
  doi = {10.1145/2971485.2971518},
  pages = {4},
  year = {2016},
  abstract = {Electronic markers can be used to link physical representations and virtual content for tangible interaction, such as the visual markers commonly used for tabletops. Another possibility is to leverage the capacitive touch input of smartphones, tablets and notebooks. However, existing approaches either do not couple physical and virtual representations or require significant post-processing. This paper presents and evaluates a novel approach using a coding scheme for the automatic identification of tangibles by touch inputs when they are touched and shifted. The codes can be generated automatically and integrated into a great variety of existing 3D models from the internet. The resulting models can then be printed completely in one cycle by off-the-shelf 3D printers; post-processing is not needed. Besides identification, the object's position and orientation can be tracked by touch devices. Our evaluation examined multiple variables and showed that CapCodes can be integrated into existing 3D models and that the approach could also be applied to untouched use for larger tangibles.},
  language = {en}
}

@inproceedings{Goetzelmann2015,
  author = {G{\"o}tzelmann, Timo},
  title = {CapMaps: Capacitive Sensing 3D Printed Audio-Tactile Maps},
  series = {Proceedings of the 15th International Conference on Computers Helping People with Special Needs},
  booktitle = {Proceedings of the 15th International Conference on Computers Helping People with Special Needs},
  publisher = {Springer},
  address = {Cham},
  isbn = {978-3-319-41266-5},
  doi = {10.1007/978-3-319-41267-2_20},
  pages = {146 -- 152},
  year = {2016},
  abstract = {Tactile maps can be useful tools for blind people in navigation and orientation tasks. Apart from static maps, there are techniques to augment tactile maps with audio content, which can be used to interact with the map content, to offer extra information and to reduce the tactile complexity of a map. Studies show that audio-tactile maps can be more efficient and satisfying for the user than purely tactile maps without audio feedback. A major challenge for audio-tactile maps is linking tactile elements with audio content and interactivity. This paper introduces a novel approach to link 3D printed tactile maps with mobile devices, such as smartphones and tablets, in a flexible way to enable interactivity and audio support. Since conductive filament is embedded while printing the maps, the approach integrates seamlessly into the 3D printing process. This allows the tactile map to be recognized automatically by a single press at its corner. Additionally, the placement of the tactile map on the mobile device is flexible and detected automatically, which eases the use of these maps. The practicability of this approach is shown by a dedicated feasibility study.},
  language = {en}
}

@inproceedings{Goetzelmann2013,
  author = {G{\"o}tzelmann, Timo},
  title = {Concept of the Joint Use of Smartphone Camera and Projector for Keyboard Inputs},
  publisher = {Gediz University Press},
  address = {Gediz},
  issn = {2147-9097},
  pages = {52 -- 57},
  year = {2013},
  abstract = {The efficiency of text input on today's smartphones is significantly limited by the small size of the virtual keyboard displayed for alphanumeric input. Future smartphones will integrate projectors which allow projecting multimedia content as well as the smartphone's dialogs. This paper introduces a concept for projecting the smartphone's entire display onto a surface, allowing the user to perform text input by interacting with the projected virtual keyboard. The projection is analyzed by standard image processing algorithms. Finally, an experimental implementation shows the feasibility of this concept.},
  subject = {Human-machine communication},
  language = {en}
}

@inproceedings{LyKargKreimeieretal.2022,
  author = {Ly, Kim and Karg, Pascal and Kreimeier, Julian and G{\"o}tzelmann, Timo},
  title = {Development and Evaluation of a Low-cost Wheelchair Simulator for the Haptic Rendering of Virtual Road Conditions},
  series = {Proceedings of the 15th International Conference on PErvasive Technologies Related to Assistive Environments},
  booktitle = {Proceedings of the 15th International Conference on PErvasive Technologies Related to Assistive Environments},
  publisher = {ACM},
  address = {New York, NY, USA},
  doi = {10.1145/3529190.3529195},
  pages = {32 -- 39},
  year = {2022},
  abstract = {Many streets and buildings are not accessible to wheelchair users, which poses a major challenge to their mobility. Often, such challenges can already be addressed during the planning stage, which is why it is important to include this user group in the planning process.
Ideally, these plans should also be made visually and haptically explorable using Virtual Reality (VR) technology to allow for better imagination, more accurate conclusions, and greater awareness of wheelchair users' needs. This work optimizes previous approaches so that different roadway conditions can be made haptically perceptible. The proposed prototype provides a contact-free, adjustable brake for simulating inclined planes, which can be tuned depending on personal and environmental parameters. In addition, it can simulate road surfaces such as cobblestones using a tactile transducer. The individually optimized components were combined into a complete VR system and integrated into a virtual environment for evaluation. The qualitative and quantitative results showed that realistic simulation is possible, but that further development towards comprehensive, widely deployable hardware and software is needed. To this end, our contribution aims to improve the long-term involvement of wheelchair users in planning processes and to increase awareness of their mobility situation.},
  language = {en}
}

@article{KreimeierBielmeierGoetzelmann2018,
  author = {Kreimeier, Julian and Bielmeier, Thomas and G{\"o}tzelmann, Timo},
  title = {Evaluation of Capacitive Markers Fabricated by 3D Printing, Laser Cutting and Prototyping},
  journal = {Inventions},
  volume = {3},
  number = {1},
  note = {Article 9, Special Issue: Innovations in 3-D Printing},
  publisher = {MDPI},
  doi = {10.3390/inventions3010009},
  url = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5603},
  pages = {18},
  year = {2018},
  abstract = {With Tangible User Interfaces, the computer user is able to interact in a fundamentally different and more intuitive way than with usual 2D displays. By grasping real physical objects, information can also be conveyed haptically, i.e., the user not only sees information on a 2D display, but can also grasp physical representations. To recognize such objects ("tangibles"), it is practical to use capacitive sensing, as employed in most touch screens. Thus, real objects can be located and identified automatically by the touch screen display. Recent work has already addressed such capacitive markers, but focused on their coding scheme and automated fabrication by 3D printing. This paper goes beyond fabrication by 3D printers and, for the first time, applies the concept of capacitive codes to laser cutting and to another immediate prototyping approach using modeling clay. Besides evaluating additional properties, we adapt recent research results regarding the optimized detection of tangible objects on capacitive screens. Our comprehensive study shows that detection performance is affected by the type of capacitive signal processing (i.e., the device) and by the geometry of the marker. 3D printing proved to be the most reliable technique, though laser cutting and immediate prototyping of markers also showed promising results.
Based on our findings, we discuss the individual strengths of each capacitive marker type.},
  language = {en}
}

@inproceedings{KreimeierHammerFriedmannetal.2019,
  author = {Kreimeier, Julian and Hammer, Sebastian and Friedmann, Daniel and Karg, Pascal and B{\"u}hner, Clemens and Bankel, Lukas and G{\"o}tzelmann, Timo},
  title = {Evaluation of Different Types of Haptic Feedback Influencing the Task-based Presence and Performance in Virtual Reality},
  series = {Proceedings of the 12th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA'19)},
  booktitle = {Proceedings of the 12th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA'19)},
  publisher = {ACM},
  address = {New York, NY, USA},
  isbn = {978-1-4503-6232-0},
  doi = {10.1145/3316782.3321536},
  pages = {289 -- 298},
  year = {2019},
  abstract = {Haptic feedback may support immersion and presence in virtual reality (VR) environments. The emerging consumer market offers first devices which are expected to increase the degree to which users feel actually present in a virtual environment. In this paper, we introduce a novel evaluation that examines the influence of different types of haptic feedback on presence and performance in manual tasks in VR. To this end, we conducted a comprehensive user study involving 14 subjects, who performed throwing, stacking and object identification tasks in VR with visual (i.e., sensory substitution), vibrotactile or force feedback. We measured the degree of presence and task-related performance metrics. Our results indicate that, regarding presence, vibrotactile feedback outperforms force feedback, which in turn performs better than visual feedback only. In addition, force feedback significantly lowered the execution time for the throwing and stacking tasks. In object identification tasks, vibrotactile feedback increased the detection rates compared to visual and force feedback, but also increased the required identification time. Despite the inadequacies of this still young consumer technology, there were nevertheless strong indications of connections between presence, task fulfillment and the type of haptic feedback.},
  language = {en}
}

@inproceedings{KreimeierGoetzelmann2018,
  author = {Kreimeier, Julian and G{\"o}tzelmann, Timo},
  title = {FeelVR: Haptic Exploration of Virtual Objects},
  series = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference (PETRA '18)},
  booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference (PETRA '18)},
  publisher = {ACM},
  address = {New York, NY, USA},
  isbn = {978-1-4503-6390-7},
  doi = {10.1145/3197768.3201526},
  url = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5611},
  pages = {122 -- 125},
  year = {2018},
  abstract = {Interest in virtual and augmented reality has increased rapidly in recent years, and haptic interaction and its applications have recently come into focus. In this paper, we suggest the exploration of virtual objects using off-the-shelf VR game controllers. These are held like a pen in both hands and are used to palpate and identify the virtual object. Our study largely coincides with comparable previous work and shows that a ready-to-use VR system can in principle be used for haptic exploration. The results indicate that virtual objects are recognized more effectively with closed eyes than with open eyes.
In both cases, objects with larger morphological differences were identified most frequently. The limitations in the quality and quantity of tactile feedback should be tackled in future studies that utilize currently developed wearable haptic devices and haptic rendering involving all fingers or even both hands. Thus, objects could become identifiable more intuitively, and haptic feedback devices for interacting with virtual objects could be further disseminated.},
  language = {en}
}