We present a method of utilizing depth information as provided by RGBD sensors for robust real-time visual simultaneous localisation and mapping (SLAM) by augmenting monocular visual SLAM to take into account depth data. This is implemented based on the freely available software "Parallel Tracking and Mapping" by Georg Klein. Our modifications allow PTAM to be used as a 6D visual SLAM system even without any additional information about odometry or from an inertial measurement unit.
[pdf]
@inproceedings{scherer2012,
  author    = {Scherer, Sebastian A. and Dube, Daniel and Zell, Andreas},
  title     = {Using Depth in Visual Simultaneous Localisation and Mapping},
  booktitle = {{IEEE} International Conference on Robotics and Automation},
  year      = {2012},
  address   = {St. Paul, Minnesota, USA},
  month     = may,
  abstract  = {We present a method of utilizing depth information as provided by RGBD sensors for robust real-time visual simultaneous localisation and mapping (SLAM) by augmenting monocular visual SLAM to take into account depth data. This is implemented based on the freely available software ``Parallel Tracking and Mapping'' by Georg Klein. Our modifications allow PTAM to be used as a 6D visual SLAM system even without any additional information about odometry or from an inertial measurement unit.},
  url       = {http://www.cogsys.cs.uni-tuebingen.de/publikationen/2012/scherer2012.pdf},
}