Citation
@article{katz2012,
author = {Brian Katz and Slim Kammoun and Gaëtan Parseihian and
Olivier Gutierrez and Adrien Brilhault and Malika Auvray and
Philippe Truillet and Michel Denis and Simon Thorpe and Christophe
Jouffrais},
title = {NAVIG: Augmented Reality Guidance System for the Visually
Impaired},
journal = {Virtual Reality},
volume = {16},
number = {4},
pages = {253--269},
date = {2012-06-12},
url = {https://link.springer.com/article/10.1007/s10055-012-0213-6},
doi = {10.1007/s10055-012-0213-6},
issn = {1359-4338, 1434-9957},
langid = {en},
abstract = {Navigating complex routes and finding objects of interest
are challenging tasks for the visually impaired. The project NAVIG
(Navigation Assisted by artificial VIsion and GNSS) is directed
toward increasing personal autonomy via a virtual augmented reality
system. The system integrates an adapted geographic information
system with different classes of objects useful for improving route
selection and guidance. The database also includes models of
important geolocated objects that may be detected by real-time
embedded vision algorithms. Object localization (relative to the
user) may serve both global positioning and sensorimotor actions
such as heading, grasping, or piloting. The user is guided to his
desired destination through spatialized semantic audio rendering,
always maintained in the head-centered reference frame. This paper
presents the overall project design and architecture of the NAVIG
system. In addition, details of a new type of detection and
localization device are presented. This approach combines a
bio-inspired vision system that can recognize and locate objects
very quickly and a 3D sound rendering system that is able to
perceptually position a sound at the location of the recognized
object. This system was developed in relation to guidance directives
developed through participative design with potential users and
educators for the visually impaired.}
}