%
% This file was created by the TYPO3 extension
% publications
% --- Timezone: CEST
% Creation date: 2023-02-04
% Creation time: 07:23:19
% --- Number of references
% 14
%
@Article { tochi_21_hirzle,
  author = {Hirzle, Teresa and Fischbach, Fabian and Karlbauer, Julian and Jansen, Pascal and Gugenheimer, Jan and Rukzio, Enrico and Bulling, Andreas},
  title = {Understanding, Addressing, and Analysing Digital Eye Strain in Virtual Reality Head-Mounted Displays},
  status = {1},
  year = {2022},
  month = {8},
  DOI = {10.1145/3492802},
  journal = {ACM Transactions on Computer-Human Interaction (TOCHI)},
  volume = {29},
  number = {4},
  pages = {1-80},
  publisher = {ACM},
  web_url2 = {https://youtu.be/ns2HwQ2p\_hM},
  file_url = {t3://file?uid=456150}
}
@Article { hirzle_AttentionOfManyObservers_2022,
  author = {Hirzle, Teresa and Sauter, Marian and Wagner, Tobias and Hummel, Susanne and Rukzio, Enrico and Huckauf, Anke},
  title = {Attention of Many Observers Visualized by Eye Movements},
  abstract = {Interacting with a group of people requires to direct the attention of the whole group, thus requires feedback about the crowd’s attention. In face-to-face interactions, head and eye movements serve as indicator for crowd attention. However, when interacting online, such indicators are not available. To substitute this information, gaze visualizations were adapted for a crowd scenario. We developed, implemented, and evaluated four types of visualizations of crowd attention in an online study with 72 participants using lecture videos enriched with audience’s gazes. All participants reported increased connectedness to the audience, especially for visualizations depicting the whole distribution of gaze including spatial information. Visualizations avoiding spatial overlay by depicting only the variability were regarded as less helpful, for real-time as well as for retrospective analyses of lectures. Improving our visualizations of crowd attention has the potential for a broad variety of applications, in all kinds of social interaction and communication in groups.},
  year = {2022},
  month = {06},
  DOI = {10.1145/3517031.3529235},
  institution = {Ulm University},
  journal = {ETRA '22: 2022 Symposium on Eye Tracking Research and Applications},
  tags = {hirzle\_AttentionOfManyObservers\_2022},
  web_url2 = {https://www.uni-ulm.de/in/mi/hci/projects/attention-of-many-observers-visualized-by-eye-movements/},
  file_url = {t3://file?uid=463792}
}
@Article { sauter_CanEyeMovement_2022,
  author = {Sauter, Marian and Hirzle, Teresa and Wagner, Tobias and Hummel, Susanne and Rukzio, Enrico and Huckauf, Anke},
  title = {Can Eye Movement Synchronicity Predict Test Performance With Unreliably-Sampled Data in an Online Learning Context?},
  abstract = {Webcam-based eye-tracking promises easy and quick data collection without the need for specific or additional eye-tracking hardware. This makes it especially attractive for educational research, in particular for modern formats, such as MOOCs. However, in order to fulfill its promises, webcam-based eye tracking has to overcome several challenges, most importantly, varying spatial and temporal resolutions. Another challenge that the educational domain faces especially, is that typically individual students are of interest in contrast to average values. In this paper, we explore whether an attention measure that is based on eye movement synchronicity of a group of students can be applied with unreliably-sampled data.
Doing so we aim to reproduce earlier work that showed that, on average, eye movement synchronicity can predict performance in a comprehension quiz. We were not able to reproduce the findings with unreliably-sampled data, which highlights the challenges that lie ahead of webcam-based eye tracking in practice.},
  year = {2022},
  month = {06},
  DOI = {10.1145/3517031.3529239},
  institution = {Ulm University},
  journal = {ETRA '22: 2022 Symposium on Eye Tracking Research and Applications},
  file_url = {t3://file?uid=463880}
}
@Article { chi21_ssq,
  author = {Hirzle, Teresa and Cordts, Maurice and Rukzio, Enrico and Gugenheimer, Jan and Bulling, Andreas},
  title = {A Critical Assessment of the Use of SSQ as a Measure of General Discomfort in VR Head-Mounted Displays},
  year = {2021},
  month = {5},
  DOI = {10.1145/3411764.3445361},
  journal = {In Proc. of CHI 2021 (SIGCHI Conference on Human Factors in Computing Systems)},
  publisher = {ACM},
  web_url = {https://youtu.be/4UkAeAtENKo (Presentation Video)},
  file_url = {t3://file?uid=437948}
}
@Article { 482001684452_2021,
  author = {Rixen, Jan Ole and Hirzle, Teresa and Colley, Mark and Etzel, Yannick and Rukzio, Enrico and Gugenheimer, Jan},
  title = {Exploring Augmented Visual Alterations in Interpersonal Communication},
  year = {2021},
  month = {5},
  DOI = {10.1145/3411764.3445597},
  journal = {In Proc. of CHI 2021 (SIGCHI Conference on Human Factors in Computing Systems)},
  publisher = {ACM},
  web_url2 = {https://www.youtube.com/watch?v=Mhlem-U439Q},
  file_url = {t3://file?uid=435433}
}
@Article { etra20_survey,
  author = {Hirzle, Teresa and Cordts, Maurice and Rukzio, Enrico and Bulling, Andreas},
  title = {A Survey of Digital Eye Strain in Gaze-Based Interactive Systems},
  year = {2020},
  month = {6},
  isbn = {9781450371339},
  DOI = {10.1145/3379155.3391313},
  booktitle = {ACM Symposium on Eye Tracking Research and Applications},
  journal = {ETRA '20 Full Papers: ACM Symposium on Eye Tracking Research and Applications},
  publisher = {ACM},
  file_url = {t3://file?uid=424288}
}
@Article { 518163079193_2020,
  author = {El.Agroudy, Passant and Wang, Xiyue and Stemasov, Evgeny and Hirzle, Teresa and Shishkovets, Svetlana and Mehrotra, Siddharth and Schmidt, Albrecht},
  title = {SpotlessMind - A Design Probe for Eliciting Attitudes towards Sharing Neurofeedback},
  status = {1},
  year = {2020},
  month = {3},
  reviewed = {1},
  journal = {Augmented Humans International Conference}
}
@Article { chi19_designspaceforgazeinteraction,
  author = {Hirzle, Teresa and Gugenheimer, Jan and Geiselhart, Florian and Bulling, Andreas and Rukzio, Enrico},
  title = {A Design Space for Gaze Interaction on Head-Mounted Displays},
  year = {2019},
  month = {5},
  DOI = {10.1145/3290605.3300855},
  journal = {In Proceedings of CHI 2019 (SIGCHI Conference on Human Factors in Computing Systems)},
  publisher = {ACM},
  web_url = {https://www.uni-ulm.de/?gazedesignspace},
  file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.100/institut/mitarbeiterbereiche/hirzle/Publications/A\_Design\_Space\_for\_Gaze\_Interaction\_on\_Head-Mounted\_Displays\_CHI\_19.pdf}
}
@Article { chi19_hirzle_positionpaper,
  author = {Hirzle, Teresa and Gugenheimer, Jan and Rukzio, Enrico and Bulling, Andreas},
  title = {On the Importance of Visual (Digital) Wellbeing for HMDs},
  abstract = {Most digital devices are screen-based devices and as such our eyes are very much in demand when consuming digital content.
This is especially important for augmented and virtual reality (AR/VR) head-mounted displays (HMDs) that are entering the consumer market bringing digital displays even closer to the eyes. The influence of looking at digital screens for the majority of our waking time manifests itself already in an increased occurrence of the computer vision syndrome (CVS). In this position paper we therefore propose to design content for HMDs explicitly around the unique properties and abilities of the human eye and the visual system to avoid visual discomfort or even possible impairments. Hereby we focus on concepts of how eye health features can implicitly be integrated as visual digital wellbeing features into content design for HMDs.},
  year = {2019},
  month = {5},
  journal = {In Proc. of CHI 2019 Workshop on Designing for Digital Wellbeing},
  web_url = {https://digitalwellbeingworkshop.wordpress.com},
  file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.100/institut/mitarbeiterbereiche/hirzle/Publications/On\_the\_Importance\_of\_Visual\_\_Digital\_\_Wellbeing\_for\_HMDs\_CHI\_19\_Workshop\_on\_Digital\_Wellbeing.pdf}
}
@Article { Dreja:DemoVRSpinning2018,
  author = {Dreja, Thomas and Rietzler, Michael and Hirzle, Teresa and Gugenheimer, Jan and Frommel, Julian and Rukzio, Enrico},
  title = {A Demonstration of VRSpinning: Exploring the Design Space of a 1D Rotation Platform to Increase the Perception of Self-Motion in VR},
  year = {2018},
  month = {10},
  DOI = {10.1145/3266037.3271645},
  journal = {In Adj. Proc. of UIST '18 (ACM Symposium on User Interface Software and Technology)},
  web_url2 = {https://www.uni-ulm.de/in/mi/mi-forschung/uulm-hci/projects/vrspinning/},
  file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.100/institut/Papers/Prof\_Rukzio/2018/Demo\_SpinVR\_2\_.pdf}
}
@Article { RethinkingRDW,
  author = {Rietzler, Michael and Gugenheimer, Jan and Hirzle, Teresa and Deubzer, Martin and Langbehn, Eike and Rukzio, Enrico},
  title = {Rethinking Redirected Walking: On the Use of Curvature Gains Beyond Perceptual Limitations and Revisiting Bending Gains},
  year = {2018},
  month = {10},
  journal = {In Proc. of ISMAR 2018 (IEEE International Symposium on Mixed and Augmented Reality)},
  web_url = {https://doi.org/10.1109/ISMAR.2018.00041},
  file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.100/institut/mitarbeiterbereiche/rietzler/RedirectedWalking.pdf}
}
@Article { Hirzle:SymbioticHMS2018,
  author = {Hirzle, Teresa and Gugenheimer, Jan and Geiselhart, Florian and Bulling, Andreas and Rukzio, Enrico},
  title = {Towards a Symbiotic Human-Machine Depth Sensor: Exploring 3D Gaze for Object Reconstruction},
  abstract = {Eye tracking is expected to become an integral part of future augmented reality (AR) head-mounted displays (HMDs) given that it can easily be integrated into existing hardware and provides a versatile interaction modality. To augment objects in the real world, AR HMDs require a three-dimensional understanding of the scene, which is currently solved using depth cameras. In this work we aim to explore how 3D gaze data can be used to enhance scene understanding for AR HMDs by envisioning a symbiotic human-machine depth camera, fusing depth data with 3D gaze information. We present a first proof of concept, exploring to what extent we are able to recognise what a user is looking at by plotting 3D gaze data.
To measure 3D gaze, we implemented a vergence-based algorithm and built an eye tracking setup consisting of a Pupil Labs headset and an OptiTrack motion capture system, allowing us to measure 3D gaze inside a 50x50x50 cm volume. We show first 3D gaze plots of "gazed-at" objects and describe our vision of a symbiotic human-machine depth camera that combines a depth camera and human 3D gaze information.},
  year = {2018},
  month = {10},
  DOI = {10.1145/3266037.3266119},
  journal = {In Adj. Proc. of UIST '18 (ACM Symposium on User Interface Software and Technology)},
  keywords = {3D gaze; eye-based interaction; human-machine symbiosis},
  web_url = {https://www.uni-ulm.de/?hm\_depthsensor},
  file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.100/institut/Papers/Prof\_Rukzio/2018/3DGazeAbstractUIST2018\_both.pdf}
}
@Article { Rietzler:2018:VRSpinning,
  author = {Rietzler, Michael and Hirzle, Teresa and Gugenheimer, Jan and Frommel, Julian and Dreja, Thomas and Rukzio, Enrico},
  title = {VRSpinning: Exploring the Design Space of a 1D Rotation Platform to Increase the Perception of Self-Motion in VR},
  year = {2018},
  month = {6},
  DOI = {10.1145/3196709.3196755},
  journal = {In Proc. of DIS 2018 (ACM Conference on Designing Interactive Systems)},
  web_url = {https://www.uni-ulm.de/in/mi/mi-forschung/uulm-hci/projects/vrspinning/},
  web_url2 = {https://youtu.be/KzrtOPbr4t4},
  file_url = {https://www.uni-ulm.de/fileadmin/website\_uni\_ulm/iui.inst.100/institut/Papers/Prof\_Rukzio/2018/SpinVR\_Small.compressed.pdf}
}
@Poster { Hirzle:2018:WatchVR,
  author = {Hirzle, Teresa and Rixen, Jan Ole and Gugenheimer, Jan and Rukzio, Enrico},
  title = {WatchVR: Exploring the Usage of a Smartwatch for Interaction in Mobile Virtual Reality},
  abstract = {Mobile virtual reality (VR) head-mounted displays (HMDs) are steadily becoming part of people’s everyday life. Most current interaction approaches rely either on additional hardware (e.g. Daydream Controller) or offer only a limited interaction concept (e.g. Google Cardboard). We explore a solution where a conventional smartwatch, a device users already carry around with them, is used to enable short interactions but also allows for longer complex interactions with mobile VR. To explore the possibilities of a smartwatch for interaction, we conducted a user study in which we compared two variables with regard to user performance: interaction method (touchscreen vs inertial sensors) and wearing method (hand-held vs wrist-worn). We found that selection time and error rate were lowest when holding the smartwatch in one hand using its inertial sensors for interaction (hand-held).},
  year = {2018},
  month = {4},
  DOI = {10.1145/3170427.3188629},
  organization = {In Proceedings of CHI EA '18 (CHI '18 Extended Abstracts on Human Factors in Computing Systems)},
  keywords = {3D pointing; smartwatch; nomadic virtual reality; mobile virtual reality},
  file_url = {t3://file?uid=435518}
}