@inproceedings{2b5e65e699d4447b91bf1631df045a65,
  title     = {Human Gaze Commands Classification: A Shape Based Approach to Interfacing with Robots},
  abstract  = {The sense of sight is one of the main outlets to how we interact with the world around us. Using eye tracking methods, this sensory input channel may also be used as an output channel to provide commands for robots to follow. These gaze-commanded robots could then be used to assist severely mobility-limited individuals in the home or similar environments. This paper explores the use of visually drawn shapes as the input for robot commands. These commands were recorded using low-cost gaze tracking hardware (Gazepoint GP3 Eye Tracker). The data were then processed using a custom algorithm in MATLAB to detect commands to be passed to a small humanoid robot (NAO). Using the techniques and procedures given in this paper, people with limited mobility will be able to input shape commands to have robots like NAO react as personal assistants. This is also extensible to gaze-based human-machine interfaces in general for a variety of applications.},
  keywords  = {Assistive Robot, Gaze, Gaze Tracking, Gaze-based Control, Human-Robot Interaction, Shape Recognition},
  author    = {Craig, Trevor Lynn and Nelson, Carl A. and Li, Songpo and Zhang, Xiaoli},
  year      = {2016},
  month     = oct,
  day       = {7},
  doi       = {10.1109/MESA.2016.7587154},
  language  = {English (US)},
  series    = {MESA 2016 - 12th IEEE/ASME International Conference on Mechatronic and Embedded Systems and Applications - Conference Proceedings},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  booktitle = {MESA 2016 - 12th IEEE/ASME International Conference on Mechatronic and Embedded Systems and Applications - Conference Proceedings},
  note      = {12th IEEE/ASME International Conference on Mechatronic and Embedded Systems and Applications, MESA 2016 ; Conference date: 29-08-2016 Through 31-08-2016},
}