
@article{ref1,
  author   = {Ghasemi, Yalda and Jeong, Heejin},
  title    = {Using Gaze-Based Interaction to Alleviate Situational Mobility Impairment in Extended Reality},
  journal  = {Proceedings of the Human Factors and Ergonomics Society Annual Meeting},
  year     = {2022},
  volume   = {66},
  number   = {1},
  pages    = {435--439},
  abstract = {Significant advancements of eye-tracking technology in extended reality (XR) head-mounted displays have increased the interest in gaze-based interactions. The benefits of gaze interaction proved that it could be a suitable alternative for hand-based interactions when users face situations where they must maintain their position due to mobility impairment. This study aims to assess the user experience of the gaze-based interaction, compared to hand-based interaction, in two movement conditions of static and dynamic. Twenty-four participants took part in this study, and their experience was evaluated in terms of perceived workload, usability, and performance. The results show that gaze-based interactions significantly outperform the hand-based interaction in terms of perceived workload and usability in case of limited mobility. Also, the user performance is significantly higher in gaze-based modes under situational impairment. The findings of this study can be used for designing XR interfaces considering the situation in which the task is performed.},
  language = {en},
  issn     = {2169-5067},
  doi      = {10.1177/1071181322661224},
}