@inproceedings{GoetzelmannPavkovic2014,
  author    = {G{\"o}tzelmann, Timo and Pavkovic, Aleksander},
  title     = {Towards Automatically Generated Tactile Detail Maps by 3D Printers for Blind Persons},
  booktitle = {Computers Helping People with Special Needs},
  publisher = {Springer},
  isbn      = {978-3-319-08599-9},
  doi       = {10.1007/978-3-319-08599-9_1},
  pages     = {1--7},
  year      = {2014},
  abstract  = {This paper introduces an approach for the (semi-)automatic generation of detailed tactile maps, available worldwide, that include buildings and blind-specific features based on recognized illustrators' guidelines and standards. These guidelines for tactile maps are investigated in order to define a formal rule set and to automatically filter map data accordingly. Using this rule set, our approach automatically abstracts map data in order to generate a 2.1D tactile model with multiple height levels (layers), which can be printed by common consumer 3D printers. Based on the popular OpenStreetMap data, our automated approach makes it possible to generate arbitrary detail maps that individual blind persons are interested in, without any manual adaptation of the tactile map. Thus, this approach contributes to the goal of increasing the autonomy of blind persons.},
  subject   = {3D-Drucker},
  language  = {en}
}

@article{Goetzelmann2018,
  author    = {G{\"o}tzelmann, Timo},
  title     = {Visually Augmented Audio-Tactile Graphics for Visually Impaired People},
  journal   = {ACM Transactions on Accessible Computing (TACCESS)},
  volume    = {11},
  number    = {2},
  articleno = {8},
  publisher = {ACM},
  doi       = {10.1145/3186894},
  url       = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5571},
  year      = {2018},
  abstract  = {Tactile graphics play an essential role in knowledge transfer for blind people. The tactile exploration of these graphics is often challenging because of the cognitive load caused by physiological constraints and the graphics' complexity. Coupling physical tactile graphics with electronic devices makes it possible to support tactile exploration with auditory feedback. Often, however, these systems have strict constraints regarding their mobility or the process of coupling both components. Additionally, visually impaired people cannot appropriately benefit from their residual vision. This article presents a concept for 3D printed tactile graphics that can be used as audio-tactile graphics with ordinary smartphones or tablet computers. By using capacitive markers, the coupling of the tactile graphics with the mobile device is simplified. Tactile graphics integrating these markers can be printed in a single pass by off-the-shelf 3D printers without any post-processing, and multiple elevation levels can be used for graphical elements. Based on this generic concept for visually augmented audio-tactile graphics, we present a case study for maps. A prototypical implementation was tested in a user study with visually impaired people. All participants were able to interact with the 3D printed tactile maps using a standard tablet computer. To study the effect of visual augmentation of graphical elements, we conducted another comprehensive user study. We tested multiple types of graphics and obtained evidence that visual augmentation may offer clear advantages for the exploration of tactile graphics.
Even participants with minor residual vision could solve the tasks more quickly and accurately with visual augmentation.},
  language  = {en}
}

@article{Goetzelmann2018a,
  author    = {G{\"o}tzelmann, Timo},
  title     = {Autonomous Selection and Printing of 3D Models for People Who Are Blind},
  journal   = {ACM Transactions on Accessible Computing (TACCESS)},
  volume    = {11},
  number    = {3},
  articleno = {14},
  publisher = {ACM},
  doi       = {10.1145/3241066},
  url       = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5587},
  pages     = {1--25},
  year      = {2018},
  abstract  = {3D models are an important means for understanding spatial contexts. Today, these models can be materialized by 3D printing, which is increasingly used at schools for people with visual impairments. In contrast to sighted people, however, people with visual impairments have so far been able neither to search for nor to print 3D models without assistance. This article describes our work to develop an aid for people with visual impairments that facilitates autonomous searching for and printing of 3D models. In an initial study, we determined the requirements for accomplishing this task by means of a questionnaire and developed a first approach that allowed personal computer-based 3D printing. An extended approach allows searching and printing using common smartphones. In our architecture, the technical details of 3D printers are abstracted by a separate component that can be accessed via Wi-Fi, independently of the actual 3D printer used. It comprises a search for models in an annotated database and 3D model retrieval from the internet. The whole process can be controlled by voice interaction. The feasibility of autonomous 3D printing for people with visual impairments is shown in a first user study. A second user study examines the usability of the user interface when searching for 3D models on the internet and preparing them for materialization. The participants were able to define the important printing settings, whereas the remaining printing parameters could be determined algorithmically.},
  language  = {en}
}

@inproceedings{KreimeierGoetzelmann2018,
  author    = {Kreimeier, Julian and G{\"o}tzelmann, Timo},
  title     = {Real World VR Proxies to Support Blind People in Mobility Training},
  booktitle = {Proceedings of Mensch und Computer 2018 (MuC'18)},
  publisher = {Gesellschaft f{\"u}r Informatik e.V.},
  address   = {Bonn},
  doi       = {10.18420/muc2018-demo-0484},
  url       = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5592},
  pages     = {5},
  year      = {2018},
  abstract  = {Mobility training is an essential part of blind people's education, enabling them to move in public spaces. To safely learn new routes in public space, however, the blind person must be assisted by a sighted trainer. With the increasing availability of VR hardware, it is possible to transfer real spatial environments into virtual representations. The digitized environments can be used as a basis for this training without the safety risks posed by real-world hazards. This helps to cope with the limited resources of sighted assistants and enables blind people to become more independent. We propose to capture real public spaces (such as sidewalks, train stations, etc.) and make them explorable in this way. Orientation and mobility can be trained in this digital model via multimodal sensory feedback, involving intuitive locomotion and white cane exploration.
This paper sketches related work and proposes our novel approach. Furthermore, we suggest further improvements for our ongoing research.},
  language  = {en}
}

@article{KreimeierBielmeierGoetzelmann2018,
  author    = {Kreimeier, Julian and Bielmeier, Tobias and G{\"o}tzelmann, Timo},
  title     = {Evaluation of Capacitive Markers Fabricated by 3D Printing, Laser Cutting and Prototyping},
  journal   = {Inventions},
  volume    = {3},
  number    = {1},
  articleno = {9},
  note      = {Special Issue: Innovations in 3-D Printing},
  publisher = {MDPI},
  doi       = {10.3390/inventions3010009},
  url       = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5603},
  pages     = {18},
  year      = {2018},
  abstract  = {With tangible user interfaces, the computer user is able to interact in a fundamentally different and more intuitive way than with conventional 2D displays. By grasping real physical objects, the user not only sees information on a 2D display but also receives it haptically through physical representations. To recognize such objects ("tangibles"), capacitive sensing is well suited, as it is used in most touchscreens. In this way, real objects can be located and identified automatically by the touchscreen display. Recent work has already addressed such capacitive markers but focused on their coding scheme and automated fabrication by 3D printing. This paper goes beyond fabrication by 3D printers and, for the first time, applies the concept of capacitive codes to laser cutting and to another immediate prototyping approach using modeling clay. Besides evaluating additional properties, we adapt recent research results regarding the optimized detection of tangible objects on capacitive screens. Our comprehensive study shows that detection performance is affected by the type of capacitive signal processing (i.e., the device) and by the geometry of the marker. 3D printing proved to be the most reliable technique, although laser cutting and immediate prototyping of markers also showed promising results. Based on our findings, we discuss the individual strengths of each capacitive marker type.},
  language  = {en}
}

@inproceedings{Goetzelmann2016,
  author    = {G{\"o}tzelmann, Timo},
  title     = {CapMaps: Capacitive Sensing 3D Printed Audio-Tactile Maps},
  booktitle = {Proc. 15th International Conference on Computers Helping People with Special Needs},
  publisher = {Springer},
  address   = {Cham},
  isbn      = {978-3-319-41266-5},
  pages     = {146--152},
  year      = {2016},
  abstract  = {Tactile maps can be useful tools for blind people in navigation and orientation tasks. Apart from static maps, there are techniques to augment tactile maps with audio content, which can be used to interact with the map content, to offer extra information, and to reduce the tactile complexity of a map. Studies show that audio-tactile maps can be more efficient and satisfying for the user than purely tactile maps without audio feedback. A major challenge of audio-tactile maps is linking tactile elements with audio content and interactivity. This paper introduces a novel approach to link 3D printed tactile maps with mobile devices, such as smartphones and tablets, in a flexible way to enable interactivity and audio support. Because conductive filament is integrated into the printed maps, the approach fits seamlessly into the 3D printing process.
This makes it possible to automatically recognize the tactile map by a single press on its corner. Additionally, the arrangement of the tactile map on the mobile device is flexible and detected automatically, which eases the use of these maps. The practicability of this approach is shown by a dedicated feasibility study.},
  language  = {en}
}

@inproceedings{GoetzelmannEichler2016,
  author    = {G{\"o}tzelmann, Timo and Eichler, Laura},
  title     = {BlindWeb Maps - An Interactive Web Service for the Selection and Generation of Personalized Audio-Tactile Maps},
  booktitle = {Proc. 15th International Conference on Computers Helping People with Special Needs},
  publisher = {Springer},
  address   = {Cham},
  isbn      = {978-3-319-41266-5},
  pages     = {139--145},
  year      = {2016},
  abstract  = {Tactile maps may contribute to the orientation of blind people or, alternatively, be used for navigation. In the past, the generation of these maps was a manual task, which considerably limited their availability. Nowadays, similar to visual maps, tactile maps can also be generated semi-automatically by tools and web services. Existing approaches enable users to generate maps by entering a specific address or point of interest. This can, in principle, be done by a blind user. However, these approaches merely show an image of the map on the user's display, which cannot be read by screen readers. Consequently, the blind user does not know what is on the map before it is printed. Ideally, the map selection process should give the user more information and the freedom to select the desired excerpt. This paper introduces a novel web service for blind people to interactively select and automatically generate tactile maps. It adapts the interaction concept for map selection to the requirements of blind users whilst supporting multiple printing technologies. An integrated audio review of the map's contents provides early feedback on whether the currently selected map extract corresponds to the desired information. Changes can be initiated before the map is printed, which, especially for 3D printing, saves considerable time. The user is able to select the map features to be included in the tactile map. Furthermore, the map rendering can be adapted to different zoom levels and supports multiple printing technologies. Finally, an evaluation with blind users was used to refine our approach.},
  language  = {en}
}

@inproceedings{GoetzelmannAlthaus2016,
  author    = {G{\"o}tzelmann, Timo and Althaus, Christoph},
  title     = {TouchSurfaceModels: Capacitive Sensing Objects through 3D Printers},
  booktitle = {Proc. 9th ACM International Conference on PErvasive Technologies Related to Assistive Environments},
  publisher = {ACM},
  address   = {New York, NY, USA},
  isbn      = {978-1-4503-4337-4},
  articleno = {22},
  pages     = {1--8},
  year      = {2016},
  abstract  = {Nowadays, 3D models can be downloaded from the internet and, increasingly, printed by low-cost 3D printers. In the future, blind people could benefit from this trend. Unfortunately, many of these models are rather complex and not appropriate for purely tactile exploration. To obtain quantitative data on how 3D printable models for blind people should be constructed, tactile exploration can be recorded on video. However, the analysis of these videos is quite time-consuming and expensive.
Additionally, inaccuracies and masking effects may impede the use of this technique. In this paper, we introduce a novel approach to automatically equip existing 3D models with a mesh of conductive wires, enabling a touch-sensitive surface on the printed 3D objects. These touch-sensing 3D models can be printed in a single pass by off-the-shelf 3D printers and used as an alternative to video recording. This allows the exact registration of when and where the 3D object has been touched. In our multi-touch solution, particular attention has been paid to limiting the number of wires needed between the 3D object and the sensing electronics. Finally, our approach is evaluated by a feasibility study.},
  language  = {en}
}

@inproceedings{Goetzelmann2014,
  author    = {G{\"o}tzelmann, Timo},
  title     = {Interactive Tactile Maps for Blind People Using Smartphones' Integrated Cameras},
  booktitle = {Proc. 9th ACM International Conference on Interactive Tabletops and Surfaces (ITS'14)},
  publisher = {ACM},
  address   = {New York, NY, USA},
  isbn      = {978-1-4503-2587-5},
  pages     = {381--385},
  year      = {2014},
  abstract  = {Tactile maps may support blind persons in orientation and in understanding geographical relations, but their availability is still very limited. However, recent technologies such as 3D printers make it possible to autonomously print individual tactile maps that can be linked with interactive applications. Besides geographical depictions, the textual annotation of maps is crucial; however, it often adds much complexity to tactile maps. To limit tactile complexity, interactive approaches may help to complement maps with the auditory modality. The presented approach integrates barcodes into tactile maps to allow their detection by standard smartphone cameras. More detailed map data is then obtained automatically to support the exploration of the tactile map auditorily. Our experimental implementation shows the principal feasibility and provides the basis for ongoing comprehensive user studies.},
  language  = {en}
}

@inproceedings{Goetzelmann2015,
  author    = {G{\"o}tzelmann, Timo},
  title     = {SmartTactMaps: A Smartphone-Based Approach to Support Blind Persons in Exploring Tactile Maps},
  booktitle = {Proc. 8th ACM International Conference on PErvasive Technologies Related to Assistive Environments},
  publisher = {ACM},
  address   = {New York, NY, USA},
  isbn      = {978-1-4503-3452-5},
  pages     = {2:1--2:8},
  year      = {2015},
  abstract  = {Despite the increasing digitalization of our society, many blind persons still have very limited access to predominantly pictorial information such as maps. In this paper, we introduce a novel approach to improve the accessibility of maps for blind users by utilizing the abilities of standard smartphones. A major issue of tactile maps is the limited discriminability of the human tactile sense. Textual annotation of maps is crucial but adds much complexity to tactile maps. Additionally, only a few Braille labels can be accommodated if legibility is to be maintained. In our approach, we link smartphones with adapted tactile maps, which transforms the physical maps into interactive surfaces using both the tactile and the auditory modality.
We integrate machine-readable metadata into these maps, which can be recognized by the smartphone's camera to immediately obtain detailed map descriptions from a free global database. During tactile exploration of the map, blind users can request auditory explanations by interacting with the mobile application. An experimental application and a user study demonstrate the feasibility of our approach.},
  language  = {en}
}