@inproceedings{FaltaousSchoenherrDetjenetal.2019,
  author    = {Faltaous, Sarah and Sch{\"o}nherr, Chris and Detjen, Henrik and Schneegass, Stefan},
  title     = {Exploring Proprioceptive Take-Over Requests for Highly Automated Vehicles},
  booktitle = {Proceedings of the 18th International Conference on Mobile and Ubiquitous Multimedia},
  doi       = {10.1145/3365610.3365644},
  pages     = {1--6},
  year      = {2019},
  abstract  = {The uprising levels of autonomous vehicles allow the drivers to shift their attention to non-driving tasks while driving (i.e., texting, reading, or watching movies). However, these systems are prone to failure and, thus, depending on human intervention becomes crucial in critical situations. In this work, we propose using human actuation as a new mean of communicating take-over requests (TOR) through proprioception. We conducted a user study via a driving simulation in the presence of a complex working memory span task. We communicated TORs through four different modalities, namely, vibrotactile, audio, visual, and proprioception. Our results show that the vibrotactile condition yielded the fastest reaction time followed by proprioception. Additionally, proprioceptive cues resulted in the second best performance of the non-driving task following auditory cues.},
  language  = {en},
}