@inproceedings{KreimeierGoetzelmann2018,
  author = {Kreimeier, Julian and G{\"o}tzelmann, Timo},
  title = {Real World VR Proxies to Support Blind People in Mobility Training},
  booktitle = {Proceedings of Mensch und Computer 2018 (MuC'18)},
  publisher = {Gesellschaft f{\"u}r Informatik e.V.},
  address = {Bonn},
  doi = {10.18420/muc2018-demo-0484},
  url = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5592},
  pages = {1013--1017},
  year = {2018},
  abstract = {Mobility training is an essential part of blind people's education, enabling them to move in public spaces. To safely learn new routes in public space, however, a sighted trainer must assist the blind person. With the increasing availability of VR hardware, it is possible to transfer real spatial environments into virtual representations. These digitized environments can serve as a basis for mobility training without the safety risks posed by real-world hazards. This helps to cope with the limited resources of sighted assistants and enables blind people to become more independent. We propose to capture real public spaces (such as sidewalks, train stations, etc.) and make them explorable in this way. Orientation and mobility can be trained in this digital model via multimodal sensory feedback, involving intuitive locomotion and white cane exploration. This paper sketches the related work and proposes our novel approach. Furthermore, we suggest additional improvements for our ongoing research.},
  language = {de}
}

@article{KreimeierBielmeierGoetzelmann2018,
  author = {Kreimeier, Julian and Bielmeier, Thomas and G{\"o}tzelmann, Timo},
  title = {Evaluation of Capacitive Markers Fabricated by 3D Printing, Laser Cutting and Prototyping},
  journal = {Inventions: Special Issue Innovations in 3-D Printing},
  volume = {3},
  number = {1},
  publisher = {MDPI},
  doi = {10.3390/inventions3010009},
  url = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5603},
  pages = {18},
  year = {2018},
  abstract = {With Tangible User Interfaces, the computer user is able to interact in a fundamentally different and more intuitive way than with usual 2D displays. By grasping real physical objects, information can also be conveyed haptically, i.e., the user not only sees information on a 2D display, but can also grasp physical representations. To recognize such objects ("tangibles"), it is practical to use capacitive sensing, as employed in most touch screens. In this way, real objects can be located and identified by the touch screen automatically. Recent work already addressed such capacitive markers, but focused on their coding scheme and automated fabrication by 3D printing. This paper goes beyond fabrication by 3D printers and, for the first time, applies the concept of capacitive codes to laser cutting and another immediate prototyping approach using modeling clay. Besides evaluating additional properties, we adapt recent research results regarding the optimized detection of tangible objects on capacitive screens. Our comprehensive study shows that detection performance is affected by the type of capacitive signal processing (i.e., the device) and by the geometry of the marker. 3D printing proved to be the most reliable technique, though laser cutting and immediate prototyping of markers also showed promising results.
Based on our findings, we discuss the individual strengths of each capacitive marker type.},
  language = {en}
}

@inproceedings{KreimeierGoetzelmann2018a,
  author = {Kreimeier, Julian and G{\"o}tzelmann, Timo},
  title = {FeelVR: Haptic Exploration of Virtual Objects},
  booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference (PETRA '18)},
  publisher = {ACM},
  address = {New York},
  isbn = {978-1-4503-6390-7},
  doi = {10.1145/3197768.3201526},
  url = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5611},
  pages = {122--125},
  year = {2018},
  abstract = {Interest in virtual and augmented reality has increased rapidly in recent years, and haptic interaction and its applications have recently come into focus. In this paper, we suggest the exploration of virtual objects using off-the-shelf VR game controllers. The controllers are held like pens in both hands and are used to palpate and identify the virtual object. Our study largely coincides with comparable previous work and shows that a ready-to-use VR system can, in principle, be used for haptic exploration. The results indicate that virtual objects are recognized more effectively with closed eyes than with open eyes. In both cases, objects with larger morphological differences were identified most frequently. The limitations due to the quality and quantity of tactile feedback should be tackled in future studies that utilize currently developed wearable haptic devices and haptic rendering involving all fingers or even both hands. Thus, objects could become identifiable more intuitively, and haptic feedback devices for interacting with virtual objects would be further disseminated.},
  language = {en}
}

@inproceedings{KreimeierHammerFriedmannetal.2019,
  author = {Kreimeier, Julian and Hammer, Sebastian and Friedmann, Daniel and Karg, Pascal and B{\"u}hner, Clemens and Bankel, Lukas and G{\"o}tzelmann, Timo},
  title = {Evaluation of Different Types of Haptic Feedback Influencing the Task-based Presence and Performance in Virtual Reality},
  booktitle = {Proceedings of the 12th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA'19)},
  publisher = {ACM},
  address = {New York, NY, USA},
  isbn = {978-1-4503-6232-0},
  doi = {10.1145/3316782.3321536},
  pages = {289--298},
  year = {2019},
  abstract = {Haptic feedback may support immersion and presence in virtual reality (VR) environments. The emerging consumer market offers first devices that are expected to increase the degree to which users feel actually present in a virtual environment. In this paper we introduce a novel evaluation that examines the influence of different types of haptic feedback on presence and performance in manual tasks in VR. To this end, we conducted a comprehensive user study involving 14 subjects, who performed throwing, stacking and object identification tasks in VR with visual (i.e., sensory substitution), vibrotactile or force feedback. We measured the degree of presence and task-related performance metrics. Our results indicate that, regarding presence, vibrotactile feedback outperforms force feedback, which in turn performs better than visual feedback only.
In addition, force feedback significantly lowered the execution time for the throwing and stacking tasks. In the object identification tasks, visual feedback increased the detection rates compared to vibrotactile and force feedback, but also increased the time required for identification. Despite the shortcomings of this still young consumer technology, there were nevertheless strong indications of a connection between presence, task fulfillment and the type of haptic feedback.},
  language = {en}
}

@inproceedings{GoetzelmannSchneider2016,
  author = {G{\"o}tzelmann, Timo and Schneider, Daniel},
  title = {CapCodes: Capacitive 3D Printable Identification and On-screen Tracking for Tangible Interaction},
  booktitle = {NordiCHI '16: Proceedings of the 9th Nordic Conference on Human-Computer Interaction},
  publisher = {ACM},
  address = {New York, NY, USA},
  isbn = {978-1-4503-4763-1},
  doi = {10.1145/2971485.2971518},
  pages = {4},
  year = {2016},
  abstract = {Electronic markers can be used to link physical representations and virtual content for tangible interaction, such as the visual markers commonly used for tabletops. Another possibility is to leverage the capacitive touch input of smartphones, tablets and notebooks. However, existing approaches either do not couple physical and virtual representations or require significant post-processing. This paper presents and evaluates a novel approach using a coding scheme for the automatic identification of tangibles by touch input when they are touched and shifted. The codes can be generated automatically and integrated into a great variety of existing 3D models from the internet. The resulting models can then be printed completely in one cycle by off-the-shelf 3D printers; post-processing is not needed. Besides identification, the object's position and orientation can be tracked by touch devices. Our evaluation examined multiple variables and showed that CapCodes can be integrated into existing 3D models and that the approach could also be applied to untouched use for larger tangibles.},
  language = {en}
}

@inproceedings{GoetzelmannEichler2015,
  author = {G{\"o}tzelmann, Timo and Eichler, Laura},
  title = {BlindWeb Maps - An Interactive Web Service for the Selection and Generation of Personalized Audio-Tactile Maps},
  booktitle = {Proc. 15th International Conference on Computers Helping People with Special Needs},
  publisher = {Springer},
  address = {Cham},
  isbn = {978-3-319-41266-5},
  pages = {139--145},
  year = {2015},
  abstract = {Tactile maps may contribute to the orientation of blind people or alternatively be used for navigation. In the past, the generation of these maps was a manual task, which considerably limited their availability. Nowadays, similar to visual maps, tactile maps can also be generated semi-automatically by tools and web services. Existing approaches enable users to generate maps by entering a specific address or point of interest. This can in principle be done by a blind user. However, these approaches actually show an image of the map on the user's display, which cannot be read by screen readers. Consequently, the blind user does not know what is on the map before it is printed. Ideally, the map selection process should give the user more information and the freedom to select the desired excerpt.
This paper introduces a novel web service that allows blind people to interactively select and automatically generate tactile maps. It adapts the interaction concept for map selection to the requirements of blind users while supporting multiple printing technologies. An integrated audio review of the map's contents provides early feedback on whether the currently selected map excerpt matches the desired information. Changes can be made before the map is printed, which, especially for 3D printing, saves considerable time. The user is able to select which map features are included in the tactile map. Furthermore, the map rendering can be adapted to different zoom levels and supports multiple printing technologies. Finally, an evaluation with blind users was used to refine our approach.},
  language = {en}
}

@inproceedings{GoetzelmannAlthaus2016,
  author = {G{\"o}tzelmann, Timo and Althaus, Christopher},
  title = {TouchSurfaceModels: Capacitive Sensing Objects through 3D Printers},
  booktitle = {PETRA '16: Proceedings of the 9th ACM International Conference on PErvasive Technologies Related to Assistive Environments},
  publisher = {ACM},
  address = {New York, NY, USA},
  isbn = {978-1-4503-4337-4},
  pages = {8},
  year = {2016},
  abstract = {Nowadays, 3D models can be downloaded from the internet and increasingly be printed on low-cost 3D printers. In the future, blind people could benefit from this trend. Unfortunately, many of these models are rather complex and not appropriate for purely tactile exploration. To obtain quantitative data about how 3D printable models for blind people should be constructed, the tactile exploration can be recorded on video. However, the analysis of these videos is quite time-consuming and expensive. Additionally, inaccuracies and masking effects may impede the use of this technique. In this paper we introduce a novel approach to automatically equip existing 3D models with a mesh of conductive wires that turns the surface of the printed 3D objects into a touch-sensitive surface. These touch-sensing 3D models can be printed in a single pass by off-the-shelf 3D printers and used as an alternative to video recording. The approach allows exact registration of when and where the 3D object has been touched. In our multi-touch solution, particular attention has been paid to limiting the number of wires required between the 3D object and the sensing electronics. Finally, our approach is evaluated by a feasibility study.},
  language = {en}
}

@article{Goetzelmann2017,
  author = {G{\"o}tzelmann, Timo},
  title = {3D-Druck f{\"u}r blinde Menschen},
  journal = {Informatik-Spektrum},
  volume = {40},
  number = {6},
  publisher = {Springer},
  issn = {1432-122X},
  doi = {10.1007/s00287-017-1068-8},
  pages = {511--515},
  year = {2017},
  abstract = {Alongside conventional tactile printing techniques for blind people, 3D printing is becoming increasingly widespread. While early approaches aimed to achieve qualitatively similar print results with this alternative printing technology, more recent approaches exploit its potential to create interactive prints. Based on this development, this article gives an overview of the main approaches for creating a wide variety of tactile materials with 3D printers.
In particular, it highlights the shift from static to interactive approaches. The latter require a coupling between the tactile 3D prints and electronic entities, which can be realized with different kinds of sensing. Future developments may make it possible to sense the user's interaction with the entire surface of a 3D print and thus open up complex new interaction possibilities that can be helpful to blind as well as sighted people.},
  language = {de}
}

@inproceedings{GoetzelmannVazquez2015,
  author = {G{\"o}tzelmann, Timo and V{\'a}zquez, Pere-Pau},
  title = {InclineType: An Accelerometer-based Typing Approach for Smartwatches},
  booktitle = {Proc. 16th International Conference on Human Computer Interaction},
  publisher = {ACM},
  address = {New York, NY, USA},
  isbn = {978-1-4503-3463-1},
  doi = {10.1145/2829875.2829929},
  year = {2015},
  abstract = {Small mobile devices such as smartwatches are a rapidly growing market. However, they share the issue of limited input and output space, which could impede the future success of these devices. Hence, suitable alternatives to the concepts and metaphors known from smartphones have to be found. In this paper we present InclineType, a tilt-based keyboard input method for smartwatches that uses a 3-axis accelerometer. The user directly selects letters by moving his/her wrist and enters them by tapping on the touchscreen. Thanks to the distribution of the letters along the edges of the screen, the keyboard occupies only a small amount of space on the smartwatch. To optimize user input, our concept proposes multiple techniques to stabilize the interaction. Finally, a user study shows that users become familiar with this technique with almost no previous training, reaching speeds of about 6 wpm on average.},
  language = {en}
}

@inproceedings{Goetzelmann2017a,
  author = {G{\"o}tzelmann, Timo},
  title = {A 3D Printable Hand Exoskeleton for the Haptic Exploration of Virtual 3D Scenes},
  booktitle = {PETRA '17: Proceedings of the 10th International Conference on PErvasive Technologies Related to Assistive Environments},
  publisher = {ACM},
  address = {New York, NY, USA},
  isbn = {978-1-4503-5227-7},
  doi = {10.1145/3056540.3064950},
  pages = {63--66},
  year = {2017},
  abstract = {Virtual reality is currently experiencing a comeback. A considerable market has developed for VR computer games and educational applications. Some solutions integrate tracked devices which allow users to move freely within a certain space. Virtual 3D models can be explored visually, and the implemented collision detection allows users to receive feedback, for instance by sound or vibration. For research projects, there are several approaches that provide feedback to the fingers of a hand when the user virtually touches the surface of a 3D model. However, no product currently sold on the consumer market offers this direct feedback for the whole hand. In this paper we introduce a low-cost hand exoskeleton which is usable in conjunction with commodity hardware. It covers each of the five fingers of the user's hand; its design is open source, low-cost, customizable and can be 3D printed by individuals.
It aims at improving the haptic perception of users, is based on a popular physical computing platform and is designed to be assembled even by electronically inexperienced users. We show the integration of the wireless exoskeleton's lean interface into an exemplary VR environment and describe a calibration process that is flexible enough for customization.},
  language = {en}
}