@article{10.3389/fnins.2014.00009,
  author   = {Clady, Xavier and Clercq, Charles and Ieng, Sio-Hoi and Houseini, Fouzhan and Randazzo, Marco and Natale, Lorenzo and Bartolozzi, Chiara and Benosman, Ryad},
  title    = {Asynchronous Visual Event-Based Time-to-Contact},
  journal  = {Frontiers in Neuroscience},
  volume   = {8},
  year     = {2014},
  issn     = {1662-453X},
  doi      = {10.3389/fnins.2014.00009},
  url      = {https://www.frontiersin.org/articles/10.3389/fnins.2014.00009},
  abstract = {Reliable and fast sensing of the environment is a fundamental requirement for autonomous mobile robotic platforms. Unfortunately, the frame-based acquisition paradigm at the basis of main stream artificial perceptive systems is limited by low temporal dynamics and redundant data flow, leading to high computational costs. Hence, conventional sensing and relative computation are obviously incompatible with the design of high speed sensor-based reactive control for mobile applications, that pose strict limits on energy consumption and computational load. This paper introduces a fast obstacle avoidance method based on the output of an asynchronous event-based time encoded imaging sensor. The proposed method relies on an event-based Time To Contact (TTC) computation based on visual event-based motion flows. The approach is event-based in the sense that every incoming event adds to the computation process thus allowing fast avoidance responses. The method is validated indoor on a mobile robot, comparing the event-based TTC with a laser range finder TTC, showing that event-based sensing offers new perspectives for mobile robotics sensing.},
}