@COMMENT This file was generated by bib2html.pl version 0.94
@COMMENT written by Patrick Riley

@Article{TVCG15-dj,
  author   = {J. Edward {Swan~II} and Gurjot Singh and Stephen R. Ellis},
  title    = {Matching and Reaching Depth Judgments with Real and Augmented Reality Targets},
  journal  = {IEEE Transactions on Visualization and Computer Graphics, IEEE International Symposium on Mixed and Augmented Reality (ISMAR 2015)},
  volume   = 21,
  number   = 11,
  year     = 2015,
  pages    = {1289--1298},
  note     = {DOI: 10.1109/TVCG.2015.2459895},
  abstract = {Many compelling augmented reality (AR) applications require users to correctly perceive the location of virtual objects, some with accuracies as tight as 1 mm. However, measuring the perceived depth of AR objects at these accuracies has not yet been demonstrated. In this paper, we address this challenge by employing two different depth judgment methods, perceptual matching and blind reaching, in a series of three experiments, where observers judged the depth of real and AR target objects presented at reaching distances. Our experiments found that observers can accurately match the distance of a real target, but when viewing an AR target through collimating optics, their matches systematically overestimate the distance by 0.5 to 4.0 cm. However, these results can be explained by a model where the collimation causes the eyes' vergence angle to rotate outward by a constant angular amount. These findings give error bounds for using collimating AR displays at reaching distances, and suggest that for these applications, AR displays need to provide an adjustable focus. Our experiments further found that observers initially reach ~4 cm too short, but reaching accuracy improves with both consistent proprioception and corrective visual feedback, and eventually becomes nearly as accurate as matching.},
}