% Conference paper (CHI 2020). Fixes vs. original:
%  - required `booktitle` was missing: the proceedings title had been placed in
%    `series`, which standard styles do not print as the venue; moved it to
%    `booktitle` (and normalized the Unicode en-dash "25–30" to ASCII "25--30"
%    for classic-BibTeX safety).
%  - `pages`: "1 -- 14" -> "1--14" (no spaces around the range dash).
%  - braced {FaceHaptics} in the title so sentence-casing styles keep its
%    capitalization; braced {VR} in the abstract for the same reason.
%  - repaired the missing space in "display.The" in the abstract.
@inproceedings{WilberzLeschtschowTrepkowskietal.2020,
  author    = {Alexander Wilberz and Dominik Leschtschow and Christina Trepkowski and Jens Maiero and Ernst Kruijff and Bernhard Riecke},
  title     = {{FaceHaptics}: Robot Arm based Versatile Facial Haptics for Immersive Environments},
  booktitle = {CHI '20: Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems, April 25--30, 2020, Honolulu, HI, USA},
  publisher = {ACM},
  isbn      = {978-1-4503-6708-0},
  doi       = {10.1145/3313831.3376481},
  pages     = {1--14},
  year      = {2020},
  abstract  = {This paper introduces FaceHaptics, a novel haptic display based on a robot arm attached to a head-mounted virtual reality display. It provides localized, multi-directional and movable haptic cues in the form of wind, warmth, moving and single-point touch events and water spray to dedicated parts of the face not covered by the head-mounted display. The easily extensible system, however, can principally mount any type of compact haptic actuator or object. User study 1 showed that users appreciate the directional resolution of cues, and can judge wind direction well, especially when they move their head and wind direction is adjusted dynamically to compensate for head rotations. Study 2 showed that adding FaceHaptics cues to a {VR} walkthrough can significantly improve user experience, presence, and emotional responses.},
  language  = {en}
}