@ARTICLE{10.3389/frobt.2017.00020,
  AUTHOR={Frank, Jared A. and Moorhead, Matthew and Kapila, Vikram},
  TITLE={Mobile Mixed-Reality Interfaces That Enhance Human–Robot Interaction in Shared Spaces},
  JOURNAL={Frontiers in Robotics and AI},
  VOLUME={4},
  YEAR={2017},
  URL={https://www.frontiersin.org/articles/10.3389/frobt.2017.00020},
  DOI={10.3389/frobt.2017.00020},
  ISSN={2296-9144},
  ABSTRACT={Although user interfaces with gesture-based input and augmented graphics have promoted intuitive human–robot interactions (HRI), they are often implemented in remote applications on research-grade platforms requiring significant training and limiting operator mobility. This paper proposes a mobile mixed-reality interface approach to enhance HRI in shared spaces. As a user points a mobile device at the robot’s workspace, a mixed-reality environment is rendered providing a common frame of reference for the user and robot to effectively communicate spatial information for performing object manipulation tasks, improving the user’s situational awareness while interacting with augmented graphics to intuitively command the robot. An evaluation with participants is conducted to examine task performance and user experience associated with the proposed interface strategy in comparison to conventional approaches that utilize egocentric or exocentric views from cameras mounted on the robot or in the environment, respectively. Results indicate that, despite the suitability of the conventional approaches in remote applications, the proposed interface approach provides comparable task performance and user experiences in shared spaces without the need to install operator stations or vision systems on or around the robot. Moreover, the proposed interface approach provides users the flexibility to direct robots from their own visual perspective (at the expense of some physical workload) and leverages the sensing capabilities of the tablet to expand the robot’s perceptual range.}
}