@INPROCEEDINGS{9284794,
author={Li, Richard and Whitmire, Eric and Stengel, Michael and Boudaoud, Ben and Kautz, Jan and Luebke, David and Patel, Shwetak and Akşit, Kaan},
booktitle={2020 IEEE International Symposium on Mixed and Augmented Reality (ISMAR)},
title={Optical Gaze Tracking with Spatially-Sparse Single-Pixel Detectors},
year={2020},
pages={117-126},
  abstract={Gaze tracking is an essential component of next-generation displays for virtual reality and augmented reality applications. Traditional camera-based gaze trackers used in next-generation displays are known to be lacking in one or more of the following metrics: power consumption, cost, computational complexity, estimation accuracy, latency, and form factor. We propose the use of discrete photodiodes and light-emitting diodes (LEDs) as an alternative to traditional camera-based gaze tracking approaches while taking all of these metrics into consideration. We begin by developing a rendering-based simulation framework for understanding the relationship between light sources and a virtual model eyeball. Findings from this framework are used for the placement of LEDs and photodiodes. Our first prototype uses a neural network to obtain an average error rate of 2.67° at 400 Hz while demanding only 16 mW. By simplifying the implementation to use only LEDs, duplexed as light transceivers, and a more minimal machine learning model, namely a lightweight supervised Gaussian process regression algorithm, we show that our second prototype is capable of an average error rate of 1.57° at 250 Hz using 800 mW.},
doi={10.1109/ISMAR50242.2020.00033},
ISSN={1554-7868},
month={Nov},}
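
A minimal, hypothetical sketch of the regression approach described in the abstract: supervised Gaussian process regression mapping spatially-sparse light-intensity readings (from LEDs duplexed as transceivers) to gaze angles. This is not the authors' code; the sensor count, kernel choice, data shapes, and calibration procedure below are illustrative assumptions only.

# Sketch only: GP regression from sparse single-pixel intensities to gaze angles.
import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, WhiteKernel

rng = np.random.default_rng(0)

# Placeholder calibration data: N samples of readings from K duplexed LED
# sensors, paired with known gaze angles (degrees) from a calibration target.
N, K = 200, 8
X_train = rng.uniform(0.0, 1.0, size=(N, K))      # normalized light intensities
y_train = rng.uniform(-20.0, 20.0, size=(N, 2))   # (horizontal, vertical) gaze in degrees

# RBF kernel plus a noise term; scikit-learn fits one GP per output dimension.
kernel = 1.0 * RBF(length_scale=1.0) + WhiteKernel(noise_level=1e-3)
gpr = GaussianProcessRegressor(kernel=kernel, normalize_y=True)
gpr.fit(X_train, y_train)

# At runtime, each new intensity vector is regressed to a gaze estimate.
x_new = rng.uniform(0.0, 1.0, size=(1, K))
gaze_deg, gaze_std = gpr.predict(x_new, return_std=True)
print("estimated gaze (deg):", gaze_deg, "uncertainty:", gaze_std)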