%
% This file was created by the TYPO3 extension
% publications
% ---
% Timezone: CEST
% Creation date: 2025-05-04
% Creation time: 00:25:42
% ---
% Number of references
% 19
%

@Article { 802126820457_2024,
  author = {Funke, Jana and Spahr, Julia and Hirzle, Teresa and Rukzio, Enrico},
  title = {Where do you exercise? The Impact of Different Virtual Environments on Exergame Performance and User Experience: A Preliminary Study},
  abstract = {Environments can affect mood, motivation, and productivity. Green spaces, for example, are known to have calming effects on people’s moods. In virtual reality (VR), we could take advantage of these effects, as we have full visual control over the environment. In this paper, we explore how such potential effects caused by the environment impact performance and user experience (UX) when playing an exergame. We created four environments differing in their level of detail and visual realism: (1) a white room, (2) outer space, (3) an abstract space, and (4) a forest environment. In a user study (N=26) in which participants played an exergame in all four environments, we found evidence that virtual environments (VEs) influence enjoyment and performance. The simulation of green spaces or abstract VEs with enjoyable background sounds has a particularly positive impact. We discuss how environmental features impact performance and UX and present promising avenues for future work investigating specific parts of environmental features.},
  year = {2024},
  month = {6},
  DOI = {10.1145/3639701.3663632},
  journal = {IMX '24: Proceedings of the 2024 ACM International Conference on Interactive Media Experiences},
  address = {New York, NY, USA},
  web_url = {https://dl.acm.org/doi/abs/10.1145/3639701.3663632},
  file_url = {t3://file?uid=490665}
}

@Article { 846238334021_2024,
  author = {Colley, Mark and Wanner, Beate and R\"{a}dler, Max and R\"{o}tzer, Marcel and Frommel, Julian and Hirzle, Teresa and Jansen, Pascal and Rukzio, Enrico},
  title = {Effects of a Gaze-Based 2D Platform Game on User Enjoyment, Perceived Competence, and Digital Eye Strain},
  status = {1},
  year = {2024},
  month = {5},
  reviewed = {1},
  journal = {In Proc. of CHI 2024 (SIGCHI Conference on Human Factors in Computing Systems)},
  publisher = {ACM},
  address = {New York, NY, USA},
  file_url = {t3://file?uid=485718}
}

@Article { Sauter2023BehindScreens,
  author = {Sauter, Marian and Wagner, Tobias and Hirzle, Teresa and Xin Lin, Bao and Rukzio, Enrico and Huckauf, Anke},
  title = {Behind the Screens: Exploring Eye Movement Visualization to Optimize Online Teaching and Learning},
  abstract = {The effective delivery of e-learning depends on the continuous monitoring and management of student attention. While instructors in traditional classroom settings can easily assess crowd attention through gaze cues, these cues are largely unavailable in online learning environments. To address this challenge, we collected eye movement data from twenty students and developed four visualization methods: (a) a heat map, (b) an ellipse map, (c) two moving bars, and (d) a vertical bar, which were overlaid on 13 instructional videos. Our results revealed unexpected preferences among the instructors: they did not prefer the established heat map and vertical bar for live online instruction. Instead, they chose the less intrusive ellipse visualization. Nevertheless, the heat map remained the preferred choice for retrospective analysis due to its more detailed information.
Importantly, all visualizations were found to be useful and to help restore emotional connections in online learning. In conclusion, our visualizations of crowd attention show considerable potential for a wide range of applications, extending beyond e-learning to all online presentations and retrospective analyses. Our results underscore the role these visualizations can play in enhancing both the effectiveness and the emotional connectedness of future e-learning experiences, thereby enriching the educational landscape.},
  status = {1},
  year = {2023},
  DOI = {10.1145/3603555.3603560},
  journal = {Proceedings of Mensch und Computer 2023},
  tags = {Sauter2023BehindScreens},
  file_url = {t3://file?uid=480120}
}

@Article { 448080489789_2023,
  author = {Wagner, Tobias and Hirzle, Teresa and Huckauf, Anke and Rukzio, Enrico},
  title = {Exploring Gesture and Gaze Proxies to Communicate Instructor’s Nonverbal Cues in Lecture Videos},
  abstract = {Teaching via lecture video has become the de facto standard for remote education, but videos make it difficult to interpret instructors' nonverbal referencing to the content. This is problematic, as nonverbal cues are essential for students to follow and understand a lecture. As a remedy, we explored different proxies representing instructors' pointing gestures and gaze to provide students with a point of reference in a lecture video: no proxy, gesture proxy, gaze proxy, alternating proxy, and concurrent proxies. In an online study with 100 students, we evaluated the proxies' effects on mental effort, cognitive load, learning performance, and user experience. Our results show that the proxies had no significant effect on learning-directed aspects and that the gesture and alternating proxies achieved the highest pragmatic quality. Furthermore, we found that alternating between proxies is a promising approach to providing students with information about instructors' pointing and gaze position in a lecture video.},
  year = {2023},
  DOI = {10.1145/3544549.3585842},
  journal = {In Extended Abstracts of the 2023 CHI Conference on Human Factors in Computing Systems (CHI EA ’23)},
  address = {New York, NY, USA},
  keywords = {Gaze; Gesture; Education; Lecture video; Eye-tracking},
  web_url2 = {https://dl.acm.org/doi/10.1145/3544549.3585842},
  file_url = {t3://file?uid=476912}
}

@Article { 366264846768_2023,
  author = {Funke, Jana and Schikorr, Anja and Karaosmanoglu, Sukran and Hirzle, Teresa and Steinicke, Frank and Rukzio, Enrico},
  title = {Tiles to Move: Investigating Tile-Based Locomotion for Virtual Reality},
  abstract = {Tile-based locomotion (TBL) is a popular locomotion technique for computer, console, and board games. However, despite its simplicity and unconventional movement, the transfer of TBL to virtual reality (VR) as a game platform remains unexplored. To fill this gap, we introduce TBL for VR using two example techniques: a controller-based and a feet-based one. In a first user study, we evaluated the usability and acceptance of the techniques compared to teleportation and touchpad locomotion. In a second exploratory user study, we evaluated the user experience of both TBL techniques in a maze and a museum scenario. The findings show that both techniques provide enjoyment and acceptable usability by creating either a relaxing (controller-based) or a physically active (feet-based) solution.
Finally, our results highlight that TBL techniques work particularly well for small, constrained spaces that allow users to focus on exploring details in the nearby environment (important for games), in contrast to large open spaces that require faster locomotion techniques such as teleportation.},
  status = {1},
  year = {2023},
  DOI = {10.1145/3611060},
  journal = {Proc. ACM Human-Computer Interaction 7, CHI PLAY, Article 414 (November 2023)},
  volume = {7},
  web_url = {https://doi.org/10.1145/3611060},
  file_url = {t3://file?uid=480810}
}

@Article { tochi_21_hirzle,
  author = {Hirzle, Teresa and Fischbach, Fabian and Karlbauer, Julian and Jansen, Pascal and Gugenheimer, Jan and Rukzio, Enrico and Bulling, Andreas},
  title = {Understanding, Addressing, and Analysing Digital Eye Strain in Virtual Reality Head-Mounted Displays},
  status = {1},
  year = {2022},
  month = {8},
  DOI = {10.1145/3492802},
  journal = {ACM Transactions on Computer-Human Interaction (TOCHI)},
  volume = {29},
  number = {4},
  publisher = {ACM},
  pages = {1--80},
  web_url2 = {https://youtu.be/ns2HwQ2p_hM},
  file_url = {t3://file?uid=456150}
}

@Article { hirzle_AttentionOfManyObservers_2022,
  author = {Hirzle, Teresa and Sauter, Marian and Wagner, Tobias and Hummel, Susanne and Rukzio, Enrico and Huckauf, Anke},
  title = {Attention of Many Observers Visualized by Eye Movements},
  abstract = {Interacting with a group of people requires directing the attention of the whole group, which in turn requires feedback about the crowd’s attention. In face-to-face interactions, head and eye movements serve as indicators of crowd attention. However, when interacting online, such indicators are not available. To substitute this information, gaze visualizations were adapted for a crowd scenario. We developed, implemented, and evaluated four types of visualizations of crowd attention in an online study with 72 participants using lecture videos enriched with the audience’s gazes. All participants reported increased connectedness to the audience, especially for visualizations depicting the whole distribution of gaze including spatial information. Visualizations avoiding spatial overlay by depicting only the variability were regarded as less helpful, for real-time as well as for retrospective analyses of lectures. Improving our visualizations of crowd attention has the potential for a broad variety of applications, in all kinds of social interaction and communication in groups.},
  year = {2022},
  month = {6},
  DOI = {10.1145/3517031.3529235},
  institution = {Ulm University},
  journal = {ETRA '22: 2022 Symposium on Eye Tracking Research and Applications},
  tags = {hirzle_AttentionOfManyObservers_2022},
  web_url2 = {/in/mi/hci/projects/attention-of-many-observers-visualized-by-eye-movements/},
  file_url = {t3://file?uid=463792}
}

@Article { sauter_CanEyeMovement_2022,
  author = {Sauter, Marian and Hirzle, Teresa and Wagner, Tobias and Hummel, Susanne and Rukzio, Enrico and Huckauf, Anke},
  title = {Can Eye Movement Synchronicity Predict Test Performance With Unreliably-Sampled Data in an Online Learning Context?},
  abstract = {Webcam-based eye tracking promises easy and quick data collection without the need for specific or additional eye-tracking hardware. This makes it especially attractive for educational research, in particular for modern formats such as MOOCs. However, in order to fulfill its promises, webcam-based eye tracking has to overcome several challenges, most importantly varying spatial and temporal resolutions.
Another challenge, which the educational domain faces in particular, is that individual students are typically of interest rather than average values. In this paper, we explore whether an attention measure based on the eye movement synchronicity of a group of students can be applied with unreliably-sampled data. In doing so, we aim to reproduce earlier work showing that, on average, eye movement synchronicity can predict performance in a comprehension quiz. We were not able to reproduce the findings with unreliably-sampled data, which highlights the challenges that lie ahead of webcam-based eye tracking in practice.},
  year = {2022},
  month = {6},
  DOI = {10.1145/3517031.3529239},
  institution = {Ulm University},
  journal = {ETRA '22: 2022 Symposium on Eye Tracking Research and Applications},
  file_url = {t3://file?uid=463880}
}

@Article { chi21_ssq,
  author = {Hirzle, Teresa and Cordts, Maurice and Rukzio, Enrico and Gugenheimer, Jan and Bulling, Andreas},
  title = {A Critical Assessment of the Use of SSQ as a Measure of General Discomfort in VR Head-Mounted Displays},
  year = {2021},
  month = {5},
  DOI = {10.1145/3411764.3445361},
  journal = {In Proc. of CHI 2021 (SIGCHI Conference on Human Factors in Computing Systems)},
  publisher = {ACM},
  web_url = {https://youtu.be/4UkAeAtENKo},
  file_url = {t3://file?uid=437948}
}

@Article { 499746939323_2021,
  author = {Rixen, Jan Ole and Hirzle, Teresa and Colley, Mark and Etzel, Yannick and Rukzio, Enrico and Gugenheimer, Jan},
  title = {Exploring Augmented Visual Alterations in Interpersonal Communication},
  year = {2021},
  month = {5},
  DOI = {10.1145/3411764.3445597},
  journal = {In Proc. of CHI 2021 (SIGCHI Conference on Human Factors in Computing Systems)},
  publisher = {ACM},
  web_url2 = {https://www.youtube.com/watch?v=Mhlem-U439Q},
  file_url = {t3://file?uid=435433}
}

@Article { etra20_survey,
  author = {Hirzle, Teresa and Cordts, Maurice and Rukzio, Enrico and Bulling, Andreas},
  title = {A Survey of Digital Eye Strain in Gaze-Based Interactive Systems},
  year = {2020},
  month = {6},
  isbn = {9781450371339},
  DOI = {10.1145/3379155.3391313},
  journal = {ETRA '20 Full Papers: ACM Symposium on Eye Tracking Research and Applications},
  publisher = {ACM},
  file_url = {t3://file?uid=424288}
}

@Article { 806009499392_2020,
  author = {El Agroudy, Passant and Wang, Xiyue and Stemasov, Evgeny and Hirzle, Teresa and Shishkovets, Svetlana and Mehrotra, Siddharth and Schmidt, Albrecht},
  title = {SpotlessMind - A Design Probe for Eliciting Attitudes towards Sharing Neurofeedback},
  status = {1},
  year = {2020},
  month = {3},
  reviewed = {1},
  journal = {Augmented Humans International Conference}
}

@Article { chi19_designspaceforgazeinteraction,
  author = {Hirzle, Teresa and Gugenheimer, Jan and Geiselhart, Florian and Bulling, Andreas and Rukzio, Enrico},
  title = {A Design Space for Gaze Interaction on Head-Mounted Displays},
  year = {2019},
  month = {5},
  DOI = {10.1145/3290605.3300855},
  journal = {In Proceedings of CHI 2019 (SIGCHI Conference on Human Factors in Computing Systems)},
  publisher = {ACM},
  web_url = {/?gazedesignspace},
  file_url = {/fileadmin/website\_uni\_ulm/iui.inst.100/institut/mitarbeiterbereiche/hirzle/Publications/A\_Design\_Space\_for\_Gaze\_Interaction\_on\_Head-Mounted\_Displays\_CHI\_19.pdf}
}

@Article { chi19_hirzle_positionpaper,
  author = {Hirzle, Teresa and Gugenheimer, Jan and Rukzio, Enrico and Bulling, Andreas},
  title = {On the Importance of Visual (Digital)
Wellbeing for HMDs},
  abstract = {Most digital devices are screen-based, and as such our eyes are very much in demand when we consume digital content. This is especially relevant for augmented and virtual reality (AR/VR) head-mounted displays (HMDs), which are entering the consumer market and bring digital displays even closer to the eyes. The influence of looking at digital screens for the majority of our waking time already manifests itself in an increased occurrence of computer vision syndrome (CVS). In this position paper, we therefore propose to design content for HMDs explicitly around the unique properties and abilities of the human eye and the visual system, to avoid visual discomfort or even possible impairments. Here, we focus on concepts for how eye health features can be implicitly integrated into content design for HMDs as visual digital wellbeing features.},
  year = {2019},
  month = {5},
  journal = {In Proc. of CHI 2019 Workshop on Designing for Digital Wellbeing},
  web_url = {https://digitalwellbeingworkshop.wordpress.com},
  file_url = {/fileadmin/website\_uni\_ulm/iui.inst.100/institut/mitarbeiterbereiche/hirzle/Publications/On\_the\_Importance\_of\_Visual\_\_Digital\_\_Wellbeing\_for\_HMDs\_CHI\_19\_Workshop\_on\_Digital\_Wellbeing.pdf}
}

@Article { Dreja:DemoVRSpinning2018,
  author = {Dreja, Thomas and Rietzler, Michael and Hirzle, Teresa and Gugenheimer, Jan and Frommel, Julian and Rukzio, Enrico},
  title = {A Demonstration of VRSpinning: Exploring the Design Space of a 1D Rotation Platform to Increase the Perception of Self-Motion in VR},
  year = {2018},
  month = {10},
  DOI = {10.1145/3266037.3271645},
  journal = {In Adj. Proc. of UIST '18 (ACM Symposium on User Interface Software and Technology)},
  web_url2 = {/in/mi/mi-forschung/uulm-hci/projects/vrspinning/},
  file_url = {/fileadmin/website\_uni\_ulm/iui.inst.100/institut/Papers/Prof\_Rukzio/2018/Demo\_SpinVR\_2\_.pdf}
}

@Article { RethinkingRDW,
  author = {Rietzler, Michael and Gugenheimer, Jan and Hirzle, Teresa and Deubzer, Martin and Langbehn, Eike and Rukzio, Enrico},
  title = {Rethinking Redirected Walking: On the Use of Curvature Gains Beyond Perceptual Limitations and Revisiting Bending Gains},
  year = {2018},
  month = {10},
  journal = {In Proc. of ISMAR 2018 (IEEE International Symposium on Mixed and Augmented Reality)},
  web_url = {https://doi.org/10.1109/ISMAR.2018.00041},
  file_url = {/fileadmin/website\_uni\_ulm/iui.inst.100/institut/mitarbeiterbereiche/rietzler/RedirectedWalking.pdf}
}

@Article { Hirzle:SymbioticHMS2018,
  author = {Hirzle, Teresa and Gugenheimer, Jan and Geiselhart, Florian and Bulling, Andreas and Rukzio, Enrico},
  title = {Towards a Symbiotic Human-Machine Depth Sensor: Exploring 3D Gaze for Object Reconstruction},
  abstract = {Eye tracking is expected to become an integral part of future augmented reality (AR) head-mounted displays (HMDs), given that it can easily be integrated into existing hardware and provides a versatile interaction modality. To augment objects in the real world, AR HMDs require a three-dimensional understanding of the scene, which is currently solved using depth cameras. In this work we aim to explore how 3D gaze data can be used to enhance scene understanding for AR HMDs by envisioning a symbiotic human-machine depth camera, fusing depth data with 3D gaze information. We present a first proof of concept, exploring to what extent we are able to recognise what a user is looking at by plotting 3D gaze data.
To measure 3D gaze, we implemented a vergence-based algorithm and built an eye tracking setup consisting of a Pupil Labs headset and an OptiTrack motion capture system, allowing us to measure 3D gaze inside a 50x50x50 cm volume. We show first 3D gaze plots of \dqgazed-at\dq objects and describe our vision of a symbiotic human-machine depth camera that combines a depth camera and human 3D gaze information.},
  year = {2018},
  month = {10},
  DOI = {10.1145/3266037.3266119},
  journal = {In Adj. Proc. of UIST '18 (ACM Symposium on User Interface Software and Technology)},
  keywords = {3D gaze; eye-based interaction; human-machine symbiosis},
  web_url = {/?hm\_depthsensor},
  file_url = {/fileadmin/website\_uni\_ulm/iui.inst.100/institut/Papers/Prof\_Rukzio/2018/3DGazeAbstractUIST2018\_both.pdf}
}

@Article { Rietzler:2018:VRSpinning,
  author = {Rietzler, Michael and Hirzle, Teresa and Gugenheimer, Jan and Frommel, Julian and Dreja, Thomas and Rukzio, Enrico},
  title = {VRSpinning: Exploring the Design Space of a 1D Rotation Platform to Increase the Perception of Self-Motion in VR},
  year = {2018},
  month = {6},
  DOI = {10.1145/3196709.3196755},
  journal = {In Proc. of DIS 2018 (ACM Conference on Designing Interactive Systems)},
  web_url = {/in/mi/mi-forschung/uulm-hci/projects/vrspinning/},
  web_url2 = {https://youtu.be/KzrtOPbr4t4},
  file_url = {/fileadmin/website\_uni\_ulm/iui.inst.100/institut/Papers/Prof\_Rukzio/2018/SpinVR\_Small.compressed.pdf}
}

@Poster { Hirzle:2018:WatchVR,
  author = {Hirzle, Teresa and Rixen, Jan Ole and Gugenheimer, Jan and Rukzio, Enrico},
  title = {WatchVR: Exploring the Usage of a Smartwatch for Interaction in Mobile Virtual Reality},
  abstract = {Mobile virtual reality (VR) head-mounted displays (HMDs) are steadily becoming part of people’s everyday life. Most current interaction approaches either rely on additional hardware (e.g. the Daydream controller) or offer only a limited interaction concept (e.g. Google Cardboard). We explore a solution in which a conventional smartwatch, a device users already carry with them, enables both short interactions and longer, complex interactions with mobile VR. To explore the possibilities of a smartwatch for interaction, we conducted a user study in which we compared two variables with regard to user performance: interaction method (touchscreen vs inertial sensors) and wearing method (hand-held vs wrist-worn). We found that selection time and error rate were lowest when holding the smartwatch in one hand and using its inertial sensors for interaction (hand-held).},
  year = {2018},
  month = {4},
  DOI = {10.1145/3170427.3188629},
  organization = {In Proceedings of CHI EA '18 (CHI '18 Extended Abstracts on Human Factors in Computing Systems)},
  keywords = {3D pointing; smartwatch; nomadic virtual reality; mobile virtual reality},
  file_url = {t3://file?uid=435518}
}