@inproceedings{ReuterHauserMuckelbaueretal., author = {Reuter, Rebecca and Hauser, Florian and Muckelbauer, Daniel and Stark, Theresa and Antoni, Erika and Mottok, J{\"u}rgen and Wolff, Christian}, title = {Using augmented reality in software engineering education? First insights to a comparative study of 2D and AR UML modeling}, series = {Proceedings of the 52nd Hawaii International Conference on System Sciences (HICSS) and 31st Conference on Software Engineering Education and Training (CSEE\&T), January 8 - 11, 2019, Grand Wailea, Hawaii}, booktitle = {Proceedings of the 52nd Hawaii International Conference on System Sciences (HICSS) and 31st Conference on Software Engineering Education and Training (CSEE\&T), January 8 - 11, 2019, Grand Wailea, Hawaii}, doi = {10.24251/HICSS.2019.938}, pages = {7798 -- 7807}, abstract = {Although there has been much speculation about the potential of Augmented Reality (AR) for teaching and learning material, there is a significant lack of empirical proof of its effectiveness and implementation in higher education. We describe software that integrates AR, using the Microsoft HoloLens, into UML (Unified Modeling Language) teaching. Its user interface is designed to overcome problems of existing software. We discuss the design of the tool and report a first evaluation study. The study uses effectiveness as a metric for students' performance together with components of motivation. It was designed as a control group experiment with two groups: the experimental group had to solve tasks with the help of the AR modeling tool, while the control group used classic PC software. We identified tendencies that participants in the experimental group showed more motivation than those in the control group. Both groups performed equally well.}, language = {en} } @inproceedings{StauferEzerRoehrletal., author = {Staufer, Susanne and Ezer, Timur and R{\"o}hrl, Simon and Grabinger, Lisa and Hauser, Florian and Nadimpalli, Vamsi Krishna and Antoni, Erika and Mottok, J{\"u}rgen and Schaffer, Josefa}, title = {TYCHE ALGORITHM 2.0: Learning Paths from Questionnaire Responses and Learning Analytics}, series = {ICERI2025 Proceedings}, booktitle = {ICERI2025 Proceedings}, publisher = {IATED}, isbn = {978-84-09-78706-7}, doi = {10.21125/iceri.2025.0831}, url = {http://nbn-resolving.de/urn:nbn:de:bvb:898-opus4-87846}, pages = {2562 -- 2572}, abstract = {The digitalization of learning processes has increased the need for adaptive learning paths tailored to individual learners. This paper presents Tyche 2.0, a novel algorithm for learning path generation. It extends the original Tyche approach by Staufer et al. - a Markov model for generating learning paths - by integrating additional learner data beyond learning styles (Index of Learning Styles (ILS)), including learning strategies (LIST-K questionnaire), personality traits (BFI-10 questionnaire), and learning analytics captured through screen recordings. A heuristic evaluates the screen recordings so that they can feed into the model. The enhanced algorithm employs Markov models to dynamically generate personalized learning paths based on both questionnaire responses and real-time engagement data, whose weights are adjusted dynamically over time. We conducted a small-scale evaluation of Tyche 2.0 without the learning analytics component, which shows that there is room for further improvement.
Future research will focus on evaluating the complete Tyche 2.0 algorithm in another university setting to further improve personalization and user engagement.}, language = {en} } @inproceedings{SchafferEzerRoehrletal., author = {Schaffer, Josefa and Ezer, Timur and R{\"o}hrl, Simon and Hauser, Florian and Staufer, Susanne and Nadimpalli, Vamsi Krishna and Grabinger, Lisa and Antoni, Erika and Mottok, J{\"u}rgen}, title = {EYE TRACKING GLASSES IN EDUCATIONAL SETTINGS: GUIDELINES ON DATA QUALITY}, series = {ICERI2025 Proceedings}, booktitle = {ICERI2025 Proceedings}, editor = {G{\'o}mez Chova, Luis and Gonz{\'a}lez Mart{\'i}nez, Chelo and Lees, Joanna}, publisher = {IATED}, doi = {10.21125/iceri.2025.1419}, pages = {5027 -- 5038}, abstract = {Eye movement modeling examples, so-called EMME videos, are a valuable tool in education, helping learners better understand instructional content. Like conventional educational videos, EMME videos combine text, images, and voiceovers. However, they also display the instructor's or teacher's gaze, guiding learners' attention to key elements. Although various approaches exist for creating EMME videos, there is currently no standardised guideline for ensuring gaze data quality. Eye tracking technology is essential to capture gaze behaviour, and in educational settings without a fixed computer monitor - such as when using blackboards or conducting live experiments - the use of mobile eye tracking glasses is beneficial. An accuracy study is conducted using mobile eye tracking glasses to provide empirical guidance for the development of high-quality educational EMME videos and to ensure that the instructor's or teacher's gaze is captured with high precision. The study uses the Tobii Pro Glasses 3 and involves a static and a dynamic setup with 34 participants. To gain insight into the effects of visual impairments on accuracy, we also include participants who wear contact lenses. In the static setup, participants are seated at a desk with a headrest and focus on a poster with nine fixation points. In the dynamic setup, participants walk in a controlled half-circle around the poster while maintaining focus on its centre. Each setup is performed multiple times under varying lighting levels (300 lux, 700 lux) and distances between participant and poster (80 cm, 120 cm, 180 cm). This enables the simulation of diverse educational environments, including the possibility of a teacher's or instructor's movement. The study results are evaluated regarding lighting conditions, the distance between the person wearing the eye tracking glasses and the object, and possible influences of contact lenses. Based on these findings, favourable conditions for creating EMME videos in educational settings are identified, especially when working without a fixed computer monitor. The results address the outlined research gap by providing instructors and teachers with guidelines that enable them to produce high-quality educational EMME videos.}, language = {en} }
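
Note on the StauferEzerRoehrletal. entry: the abstract describes a Markov model that generates learning paths from a start distribution derived from questionnaire responses, with transition weights adjusted over time by engagement data. The following is a minimal Python sketch of that general idea; the state names, questionnaire-blending weights, and adjustment rule are all illustrative assumptions, not the authors' actual Tyche 2.0 implementation.

    # Minimal sketch of Markov-model learning path generation, in the spirit of
    # the Tyche 2.0 abstract above. All names and weights are hypothetical.
    import random

    STATES = ["video", "text", "quiz", "exercise"]  # hypothetical learning objects

    def initial_distribution(ils, list_k, bfi, w=(0.5, 0.3, 0.2)):
        """Blend per-questionnaire preference scores (dicts: state -> [0, 1])
        into one normalized start distribution over STATES."""
        scores = [w[0] * ils[s] + w[1] * list_k[s] + w[2] * bfi[s] for s in STATES]
        total = sum(scores)
        return [x / total for x in scores]

    def adjust_row(transition_row, engagement):
        """Reweight one row of the transition matrix toward states with high
        observed engagement (e.g. from a screen-recording heuristic)."""
        raw = [p * (1.0 + engagement[s]) for p, s in zip(transition_row, STATES)]
        total = sum(raw)
        return [x / total for x in raw]

    def generate_path(start_dist, transitions, engagement, length=5):
        """Sample a learning path from the engagement-adjusted Markov chain.
        transitions: dict mapping each state to a probability row over STATES."""
        path = [random.choices(STATES, weights=start_dist, k=1)[0]]
        for _ in range(length - 1):
            row = adjust_row(transitions[path[-1]], engagement)
            path.append(random.choices(STATES, weights=row, k=1)[0])
        return path

Because the adjustment is applied per step, newly observed engagement data can shift the path while it is being generated, which matches the abstract's claim that weights "undergo dynamic adjustment over time."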
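For the SchafferEzerRoehrletal. entry, eye tracking accuracy at different viewing distances is commonly reported as angular error rather than linear offset; the paper does not specify its metric, so the conversion below is a standard assumption, not the study's published analysis. It shows why the same offset on the poster matters less at 180 cm than at 80 cm.

    # Convert a linear gaze offset on the poster into angular error (degrees)
    # for the viewing distances used in the study. Illustrative only.
    import math

    def angular_error_deg(offset_cm: float, distance_cm: float) -> float:
        """Angular error for a gaze point offset_cm away from the target,
        viewed from distance_cm."""
        return math.degrees(math.atan2(offset_cm, distance_cm))

    # A 2 cm offset is about 1.43 degrees at 80 cm but only about 0.64 degrees
    # at 180 cm:
    print(angular_error_deg(2, 80))   # ~1.43
    print(angular_error_deg(2, 180))  # ~0.64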