@inproceedings{UllmannKreimeierGoetzelmannetal.2020, author = {Ullmann, Daniela and Kreimeier, Julian and G{\"o}tzelmann, Timo and Kipke, Harald}, title = {BikeVR: a virtual reality bicycle simulator towards sustainable urban space and traffic planning}, series = {Proceedings of Mensch und Computer 2020}, booktitle = {Proceedings of Mensch und Computer 2020}, publisher = {Association for Computing Machinery}, address = {New York, NY}, isbn = {978-1-4503-7540-5}, doi = {10.1145/3404983.3410417}, pages = {511 -- 514}, year = {2020}, abstract = {As awareness of the ongoing climate change grows, eco-friendly means of transport for all citizens are moving further into focus. In order to implement specific measures, it is necessary to better understand and promote sustainable modes of transportation such as walking and cycling through focused research. When developing novel traffic concepts and urban spaces for non-motorized traffic participants such as cyclists and pedestrians, traffic and urban planning must focus on their needs. To capture rarely assessed qualitative factors (such as stress, the perception of time and the attractiveness of the environment) in this context, we present an audiovisual VR bicycle simulator which allows the user to cycle through a virtual urban environment by physically pedaling and steering. Virtual Reality (VR) is a suitable tool in this context, as study participants encounter identical and almost freely definable (virtual) urban spaces with adjustable traffic scenarios. Our preliminary prototype proved to be promising and will be further optimized and evaluated.}, language = {en} }

@article{KreimeierBielmeierGoetzelmann2018, author = {Kreimeier, Julian and Bielmeier, Thomas and G{\"o}tzelmann, Timo}, title = {Evaluation of Capacitive Markers Fabricated by 3D Printing, Laser Cutting and Prototyping}, series = {Inventions: Special Issue Innovations in 3-D Printing}, volume = {3}, journal = {Inventions: Special Issue Innovations in 3-D Printing}, number = {1}, publisher = {MDPI}, doi = {10.3390/inventions3010009}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5603}, pages = {18}, year = {2018}, abstract = {With Tangible User Interfaces, the computer user is able to interact in a fundamentally different and more intuitive way than with usual 2D displays. By grasping real physical objects, information can also be conveyed haptically, i.e., the user not only sees information on a 2D display, but can also grasp physical representations. To recognize such objects ("tangibles"), it is advantageous to use capacitive sensing, as employed in most touch screens. Thus, real objects can be located and identified by the touch screen automatically. Recent work has already addressed such capacitive markers, but focused on their coding scheme and automated fabrication by 3D printing. This paper goes beyond fabrication by 3D printers and, for the first time, applies the concept of capacitive codes to laser cutting and another immediate prototyping approach using modeling clay. Besides the evaluation of additional properties, we adapt recent research results regarding the optimized detection of tangible objects on capacitive screens. Our comprehensive study shows that detection performance is affected by the type of capacitive signal processing (respectively the device) and the geometry of the marker.
3D printing proved to be the most reliable technique, though laser cutting and immediate prototyping of markers also showed promising results. Based on our findings, we discuss the individual strengths of each capacitive marker type.}, language = {en} }

@inproceedings{KreimeierGoetzelmann2018, author = {Kreimeier, Julian and G{\"o}tzelmann, Timo}, title = {FeelVR: Haptic Exploration of Virtual Objects}, series = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference (PETRA '18)}, volume = {2018}, booktitle = {Proceedings of the 11th PErvasive Technologies Related to Assistive Environments Conference (PETRA '18)}, publisher = {ACM}, address = {New York}, isbn = {978-1-4503-6390-7}, doi = {10.1145/3197768.3201526}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:92-opus4-5611}, pages = {122 -- 125}, year = {2018}, abstract = {Interest in virtual and augmented reality has increased rapidly in recent years. Recently, haptic interaction and its applications have come into focus. In this paper, we suggest the exploration of virtual objects using off-the-shelf VR game controllers. These are held like a pen in both hands and used to palpate and identify virtual objects. Our study largely coincides with comparable previous work and shows that a ready-to-use VR system can, in principle, be used for haptic exploration. The results indicate that virtual objects are recognized more effectively with closed eyes than with open eyes. In both cases, objects with greater morphological differences were identified most frequently. The limitations due to the quality and quantity of tactile feedback should be tackled in future studies that utilize currently developed wearable haptic devices and haptic rendering involving all fingers or even both hands. Thus, objects could become more intuitively identifiable, and haptic feedback devices for interacting with virtual objects would be further disseminated.}, language = {en} }

@inproceedings{KreimeierHammerFriedmannetal.2019, author = {Kreimeier, Julian and Hammer, Sebastian and Friedmann, Daniel and Karg, Pascal and B{\"u}hner, Clemens and Bankel, Lukas and G{\"o}tzelmann, Timo}, title = {Evaluation of Different Types of Haptic Feedback Influencing the Task-based Presence and Performance in Virtual Reality}, series = {Proceedings of the 12th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA'19)}, booktitle = {Proceedings of the 12th ACM International Conference on PErvasive Technologies Related to Assistive Environments (PETRA'19)}, publisher = {ACM}, address = {New York, NY, USA}, isbn = {978-1-4503-6232-0}, doi = {10.1145/3316782.3321536}, pages = {289 -- 298}, year = {2019}, abstract = {Haptic feedback may support immersion and presence in virtual reality (VR) environments. The emerging consumer device market offers the first devices expected to increase the degree to which users feel actually present in a virtual environment. In this paper, we introduce a novel evaluation that examines the influence of different types of haptic feedback on presence and performance in manual tasks in VR. To this end, we conducted a comprehensive user study involving 14 subjects, who performed throwing, stacking and object identification tasks in VR with visual (i.e., sensory substitution), vibrotactile or force feedback. We measured the degree of presence and task-related performance metrics.
Our results indicate that, regarding presence, vibrotactile feedback outperforms force feedback, which in turn performs better than visual feedback only. In addition, force feedback significantly lowered the execution time for the throwing and stacking tasks. In the object identification tasks, visual feedback increased the detection rates compared to vibrotactile and force feedback, but also increased the time required for identification. Despite the inadequacies of the still young consumer technology, there were strong indications of connections between presence, task fulfillment and the type of haptic feedback.}, language = {en} }

@inproceedings{Goetzelmann2013, author = {G{\"o}tzelmann, Timo}, title = {Concept of the Joint Use of Smartphone Camera and Projector for Keyboard Inputs}, publisher = {Gediz University Press}, address = {Gediz}, issn = {2147-9097}, pages = {52 -- 57}, year = {2013}, abstract = {The efficiency of text input on today's smartphones is significantly limited by the small extent of the virtual keyboard displayed for alphanumeric input. Future smartphones will integrate projectors which make it possible to project multimedia content as well as the smartphone's dialogs. This paper introduces a concept to project the whole smartphone display onto a surface, allowing the user to enter text by interacting with the projected virtual keyboard. This projection is analyzed by standard image processing algorithms. Finally, an experimental implementation shows the feasibility of this concept.}, subject = {Mensch-Maschine-Kommunikation}, language = {en} }

@inproceedings{DeitschGoetzelmannGallwitz2014, author = {Deitsch, Sergiu and G{\"o}tzelmann, Timo and Gallwitz, Florian}, title = {Smartphone Input Using Its Integrated Projector and Built-In Camera}, publisher = {Springer}, address = {Cham}, isbn = {978-3-319-07226-5}, doi = {10.1007/978-3-319-07227-2_13}, pages = {124 -- 133}, year = {2014}, abstract = {Touch input on modern smartphones can be tedious, especially if the touchscreen is small. Smartphones with integrated projectors can be used to overcome this limitation by projecting the screen contents onto a surface, allowing the user to interact with the projection by means of simple hand gestures. In this work, we propose a novel approach for projector smartphones that allows the user to remotely interact with the smartphone screen via its projection. We detect the user's interaction using the built-in camera and forward detected hand gestures as touch input events to the operating system. In order to avoid costly computations, we additionally use the built-in motion sensors. We verify the proposed method using an implementation for the consumer smartphone Samsung Galaxy Beam equipped with a deflection mirror.
}, subject = {App}, language = {en} }

@inproceedings{KargStoehrJonasetal.2023, author = {Karg, Pascal and St{\"o}hr, Roman and Jonas, Lisa and Kreimeier, Julian and G{\"o}tzelmann, Timo}, title = {Reflect-AR: Insights into Mirror-Based Augmented Reality Instructions to Support Manual Assembly Tasks}, series = {Proceedings of the 16th International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 16th International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3594806.3594866}, pages = {62 -- 68}, year = {2023}, abstract = {Manual assembly tasks can be difficult and tedious without assistance. Here, augmented reality (AR) can help to ease the task load and lower time and error rates through interactive in-situ instructions. Such approaches are often implemented with video or optical see-through AR. In order to test a system that is as low-threshold and easy to implement as possible, we developed a prototype with a mirror display supplemented by an RGB-D camera and evaluated it against a well-established AR HMD (HoloLens 2). For this purpose, 10 participants in our study placed 3D-printed bricks using both technologies. The quantitative and qualitative analysis revealed that, due to the prototype status and remaining technical shortcomings, our setup does not yet match the professional, established AR HMD in terms of time required, error rate, usability and task load. However, the mirror display prototype met with interest from the participants as a novel, but unfamiliar and thus more difficult to use, mode of interaction. Furthermore, we report implementation challenges and advice. The empirical insights from our prototype and the first-time comparison to an established AR HMD aim to foster future work with half-silvered AR mirrors, a little researched field with many different fields of application.}, language = {en} }

@incollection{OumardKreimeierGoetzelmann2022, author = {Oumard, Christina and Kreimeier, Julian and G{\"o}tzelmann, Timo}, title = {Pardon? An Overview of the Current State and Requirements of Voice User Interfaces for Blind and Visually Impaired Users}, series = {Lecture Notes in Computer Science}, booktitle = {Lecture Notes in Computer Science}, publisher = {Springer International Publishing}, address = {Cham}, isbn = {978-3-031-08647-2}, issn = {0302-9743}, doi = {10.1007/978-3-031-08648-9_45}, pages = {388 -- 398}, year = {2022}, abstract = {People with special needs, such as blind and visually impaired (BVI) people, can particularly benefit from voice assistants providing spoken information input and output in everyday life. However, it is crucial to understand their needs and include them in developing accessible and useful assistance systems. By conducting an online survey with 146 BVI people, this paper revealed that common voice assistants like Apple's Siri or Amazon's Alexa are used by a majority of BVI people and are also considered helpful. In particular, features for audio entertainment, internet access, and practical everyday tasks like weather queries, time-related information (e.g., setting an alarm clock), checking calendar entries, and taking notes are often used and appreciated.
The participants also indicated that the integration of smart home devices, the optimization of existing functionalities, and voice input are important. Still, potentially negative aspects such as data privacy and data security are also relevant. Therefore, it seems particularly interesting to implement offline data processing as far as possible. Our results contribute to this development by providing an overview of empirically collected requirements for functions and implementation-related aspects.}, language = {en} }

@inproceedings{LyKargKreimeieretal.2022, author = {Ly, Kim and Karg, Pascal and Kreimeier, Julian and G{\"o}tzelmann, Timo}, title = {Development and Evaluation of a Low-cost Wheelchair Simulator for the Haptic Rendering of Virtual Road Conditions}, series = {Proceedings of the 15th International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 15th International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3529190.3529195}, pages = {32 -- 39}, year = {2022}, abstract = {Many streets and buildings are not accessible to wheelchair users, which poses a major challenge for their mobility. Often, such challenges can be addressed during the planning stage, which is why it is important to include this user group in the planning process. Ideally, these plans should also be made visually and haptically navigable using Virtual Reality (VR) technology to allow for better imagination, accurate conclusions, and greater awareness of wheelchair users' needs. This work optimizes previous approaches so that different roadway conditions can be made haptically perceptible. The proposed prototype provides a non-contact, adjustable brake for simulating inclined planes, which can be tuned to personal and environmental parameters. In addition, it can simulate road surfaces such as cobblestones using a tactile transducer. The individually optimized components were combined into a complete VR system and integrated into a virtual environment for evaluation. The qualitative and quantitative results showed that realistic simulation is possible, but further development steps towards holistic, dissemination-capable hardware and software are needed. To this end, our contribution aims to improve the long-term involvement of wheelchair users in planning processes and to increase awareness of their mobility situation.}, language = {en} }

@inproceedings{KargKreimeierGoetzelmann2021, author = {Karg, Pascal and Kreimeier, Julian and G{\"o}tzelmann, Timo}, title = {Build-and-Touch: A Low-Cost, DIY, Open-Source Approach Towards Touchable Virtual Reality}, series = {Proceedings of the 14th PErvasive Technologies Related to Assistive Environments Conference}, booktitle = {Proceedings of the 14th PErvasive Technologies Related to Assistive Environments Conference}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3453892.3462217}, pages = {258 -- 259}, year = {2021}, abstract = {Virtual Reality (VR) is attracting more and more attention from academic research and practical application thanks to the current availability of low-cost and end-user friendly devices. In terms of haptic (rather than visual) interaction, however, this technology is still in its infancy, and there are few devices that are inexpensive and technologically simple to operate and to procure.
In this context, we present a concept of how haptic interaction with VR data gloves can succeed by means of a commercially available webcam, sophisticated tracking software, and homemade low-cost hardware. All hardware and software components can be obtained inexpensively or are open source in order to achieve the greatest possible dissemination potential. With this work, we intend to provide an important trigger for future improvements and dissemination in terms of both technology and areas of application.}, language = {en} }

@inproceedings{GoetzelmannKreimeierSchwabletal.2021, author = {G{\"o}tzelmann, Timo and Kreimeier, Julian and Schwabl, Johannes and Karg, Pascal and Oumard, Christina and B{\"u}ttner, Florian}, title = {AmI-VR: An Accessible Building Information System as Case Study Towards the Applicability of Ambient Intelligence in Virtual Reality}, series = {Mensch und Computer 2021}, booktitle = {Mensch und Computer 2021}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3473856.3474032}, pages = {597 -- 600}, year = {2021}, abstract = {Ambient intelligence represents a paradigm in which the user does not react to the environment, but vice versa. Accordingly, smart environments can react to the presence and activities of users and support them unobtrusively from the background. Especially in the context of accessibility, this offers great potential that has so far only been demonstrated for individual user groups. To overcome this limitation, we propose the automated, user- and context-aware adaptation of both the modality and the locality of the representation of building information, in the form of an adjustable table and two displays, on the basis of a prototype for a library information center. To be independent of material and regulatory restrictions and to improve planability (especially with the ongoing COVID-19 pandemic), we used a Virtual Reality simulation in addition to the hardware components, which proved to be very useful. Further optimization and evaluation will be needed for a more in-depth understanding and dissemination in the long run, yet our prototype aims to help foster further activities in the fields of ambient intelligence, accessibility and virtual reality as a planning tool.}, language = {en} }

@inproceedings{KreimeierUllmannKipkeetal.2020, author = {Kreimeier, Julian and Ullmann, Daniela and Kipke, Harald and G{\"o}tzelmann, Timo}, title = {Initial Evaluation of Different Types of Virtual Reality Locomotion Towards a Pedestrian Simulator for Urban and Transportation Planning}, series = {Extended Abstracts of the 2020 CHI Conference on Human Factors in Computing Systems}, booktitle = {Extended Abstracts of the 2020 CHI Conference on Human Factors in Computing Systems}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3334480.3382958}, pages = {1 -- 6}, year = {2020}, abstract = {The simulation of human behaviour in today's travel demand models is usually based on the assumption that participants behave rationally. Since travel demand models have been applied in particular to motorized traffic, little is known about the influence of variables that affect both the choice of trip destination and the route decision in pedestrian and cycling models. In order to create urban spaces that encourage cycling and walking, we propose a VR (Virtual Reality) pedestrian simulator which involves walk-in-place locomotion.
Thus, identical conditions are obtained for all subjects, which is not feasible in real-world field research with naturally varying environmental influences. As a first step, our qualitative and quantitative user study revealed that walking in a VR treadmill felt safest and most intuitive, although it took more energy than walking-in-place with VR trackers only.}, language = {en} }

@inproceedings{KreimeierKargGoetzelmann2020, author = {Kreimeier, Julian and Karg, Pascal and G{\"o}tzelmann, Timo}, title = {Tabletop virtual haptics}, series = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3389189.3389194}, pages = {1 -- 10}, year = {2020}, abstract = {When thinking of Virtual Reality (VR), most people think of stunning audio-visual environments in the context of entertainment. However, VR can also provide haptic information, e.g., to convey spatial information to blind and visually impaired people. In this accessibility context, they might be able to explore tactile graphics independently and in a self-determined way, e.g., the structure of unknown real places prior to visiting them. Thus, we propose and evaluate tabletop virtual objects that can be felt with commercially available VR components, instead of exploring physical models (e.g., 3D printed maps) with the bare hand. These can easily be placed on an empty table, giving blind users faster and more independent access to tactile information than real physical representations. Our comprehensive pilot user study shows that it is possible to recognize floor plans and simple geometric shapes in this context. Also, the insights gained regarding suitability for practical application point the way to eased access to spatial (virtual) information using off-the-shelf components, which can significantly support blind and visually impaired users' autonomy.}, language = {en} }

@inproceedings{KreimeierKargGoetzelmann2020a, author = {Kreimeier, Julian and Karg, Pascal and G{\"o}tzelmann, Timo}, title = {BlindWalkVR}, series = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3389189.3389193}, pages = {1 -- 8}, year = {2020}, abstract = {Virtual Reality (VR) promises expanded access to spatial information, especially for blind and visually impaired people. Through haptic and acoustic feedback, the real world's limitations, like the risk of injury or the necessity of a sighted safety assistant, can be circumvented. However, profiting the most from this technology requires interactive locomotion in large virtual environments to overcome real-world space limitations. Thus, we present formative insights into blind people's egocentric VR locomotion by comparing four different implementations (i.e., two VR treadmills, trackers on the ankles, and joystick-based locomotion) in a qualitative and quantitative user study with seven blind and visually impaired participants.
Our results reveal novel insights into the characteristics of each implementation in terms of usability and practicability and also provide recommendations for further work in this field with the target user group in mind.}, language = {en} }

@inproceedings{KreimeierKappeGoetzelmann2020, author = {Kreimeier, Julian and Kappe, Maximilian and G{\"o}tzelmann, Timo}, title = {BlindScanLine}, series = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3389189.3393742}, pages = {1 -- 4}, year = {2020}, abstract = {Sonification is a promising way for blind and visually impaired people to capture and process information purely auditorily, e.g., by mapping distance metrics onto sound characteristics. To make optimal use of the users' sensory bandwidth and cognitive capacity, sequential scanning at multiple azimuth angles instead of only one measuring point straight ahead could be a suitable option. Thus, we present a preliminary cross-platform implementation and evaluation of such a sequential 'line-scanning sonification' using off-the-shelf components, comparing frequency modulation (FM) and amplitude modulation (AM). In our user study with blindfolded and visually impaired participants, users gained a more accurate mental model in significantly shorter time by means of FM compared to AM, and the HoloLens' usability was rated better than our LIDAR prototype's. These initial findings show possibilities for further improvement, so that similar approaches could be used more widely and effectively in blind and visually impaired people's everyday lives.}, language = {en} }

@article{KreimeierGoetzelmann2020, author = {Kreimeier, Julian and G{\"o}tzelmann, Timo}, title = {Two Decades of Touchable and Walkable Virtual Reality for Blind and Visually Impaired People: A High-Level Taxonomy}, series = {Multimodal Technologies and Interaction}, volume = {4}, journal = {Multimodal Technologies and Interaction}, number = {4}, publisher = {MDPI AG}, issn = {2414-4088}, doi = {10.3390/mti4040079}, year = {2020}, abstract = {Although most readers associate the term virtual reality (VR) with visually appealing entertainment content, this technology also promises to be helpful to disadvantaged people, such as blind or visually impaired people. Virtual objects and environments that can be spatially explored offer a particular benefit, as they overcome the limitations of physical objects and spaces. To give readers a complete, clear and concise overview of current and past publications on touchable and walkable audio-supplemented VR applications for blind and visually impaired users, this survey paper presents a high-level taxonomy to cluster the work done up to now from the perspectives of technology, interaction and application. In this respect, we introduce a classification into small-, medium- and large-scale virtual environments to cluster and characterize related work. Our comprehensive table shows that grounded force feedback devices for haptic feedback ('small scale') in particular have been strongly researched in different application scenarios, mainly from an exocentric perspective, but there are also increasingly physically ('medium scale') or avatar-walkable ('large scale') egocentric audio-haptic virtual environments.
In this respect, novel and widespread interfaces such as smartphones or today's consumer-grade VR components represent promising potential for further improvements. Our survey paper provides a database of related work to foster the creation of new ideas and approaches regarding both technical and methodological aspects.}, language = {en} }

@inproceedings{GoetzelmannKreimeier2020, author = {G{\"o}tzelmann, Timo and Kreimeier, Julian}, title = {Towards the inclusion of wheelchair users in smart city planning through virtual reality simulation}, series = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3389189.3398008}, pages = {1 -- 7}, year = {2020}, abstract = {The planning of Smart Cities is a complex task. In particular, accessibility rules based on legal regulations, but also on empirical values, must be observed. However, it is difficult to determine in advance the exact needs of people with disabilities for concrete planning. Previous approaches mainly aimed at existing urban environments. Ideally, citizens should be directly involved in the planning process of buildings and urban environments. For existing urban environments, crowdsourcing approaches exist to obtain suggestions for improvement from citizens. We present a novel approach for directly integrating wheelchair users into the urban environments to be planned (participatory urban development) in virtual reality. Our easy-to-reproduce simulator allows wheelchair users to directly explore planned buildings and urban environments in a virtual, spatial environment. This means that these 3D models can be commented on as early as the planning phase, providing valuable information about accessibility.}, language = {en} }

@inproceedings{GoetzelmannKreimeier2020a, author = {G{\"o}tzelmann, Timo and Kreimeier, Julian}, title = {Participation of elderly people in smart city planning by means of virtual reality}, series = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3389189.3397649}, pages = {1 -- 2}, year = {2020}, abstract = {Urbanisation is progressing, and in many countries the population has a growing proportion of older people. This must be taken into account when transforming urban environments into Smart Cities. On the one hand, general accessibility rules must be taken into account in the planning of buildings and urban environments. On the other hand, an essential requirement for this transformation is the participation of citizens, i.e., concrete suggestions for improvement from citizens should be taken into account. Elderly people are a valuable source of information for improvements. Instead of simply involving them in the modification of existing facilities, our approach suggests that they should be included in the planning process. However, abstract plans and questionnaires allow only limited insights for ordinary citizens. Therefore, our approach aims at providing a suitable interface to interactively walk through and annotate virtual reality plans for buildings and city districts.
We present a working prototype for elderly people, based on 3D consumer hardware and an appropriate form of locomotion, with which they can explore and annotate urban plans according to their individual needs.}, language = {en} }

@inproceedings{GoetzelmannKreimeier2020b, author = {G{\"o}tzelmann, Timo and Kreimeier, Julian}, title = {Optimization of navigation considerations of people with visual impairments through ambient intelligence}, series = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 13th ACM International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3389189.3398009}, pages = {1 -- 6}, year = {2020}, abstract = {As urbanization progresses, cities are becoming increasingly complex. To turn this complexity into an advantage, Smart Cities integrate intelligent sensors that communicate with each other and are invisible to citizens, so that additional services can be offered. Despite the fact that the inclusion of diverse citizens is an essential requirement of Smart Cities, Ambient Intelligence is often not considered from an accessibility perspective. The central question in this context is how disadvantaged groups in particular can benefit from Smart Cities. It is extremely important for people with disabilities to be able to move autonomously in public spaces. For this purpose, however, people who are blind sometimes need to be able to ask other people for directions; knowing whether people are present can therefore be important information. In our exemplary case study, Ambient Intelligence informs people with visual impairments about the density of people at a place, so that they can decide whether they want to go there or to an alternative place.}, language = {en} }

@inproceedings{OumardKreimeierGoetzelmann2022a, author = {Oumard, Christina and Kreimeier, Julian and G{\"o}tzelmann, Timo}, title = {Implementation and Evaluation of a Voice User Interface with Offline Speech Processing for People who are Blind or Visually Impaired}, series = {Proceedings of the 15th International Conference on PErvasive Technologies Related to Assistive Environments}, booktitle = {Proceedings of the 15th International Conference on PErvasive Technologies Related to Assistive Environments}, publisher = {ACM}, address = {New York, NY, USA}, doi = {10.1145/3529190.3529197}, pages = {277 -- 285}, year = {2022}, abstract = {Assistive technologies help blind and visually impaired people manage their daily lives independently. However, they usually have to work with voice user interfaces to use smartphones and tablets. Tasks like managing the calendar, taking notes, or setting an alarm clock require reliable voice recognition, which usually entails online access for remote computing. However, apart from data privacy and security issues, an online connection is not available in every situation. In this regard, our paper presents and evaluates a voice user interface with offline speech processing. The voice assistant was tested with seven blind and visually impaired people. It was found that the assistant was very well received (in terms of pragmatic, hedonic, and general quality) and that no functional limitations were perceived due to the offline data processing. Based on these findings, the scope of functionality and the level of detail of the evaluation can be extended further to adapt this technology to this specific user group and promote its dissemination.}, language = {en} }