Skip to content

C++ RGB-D Mapping

matlabbe edited this page Mar 23, 2016 · 25 revisions

Introduction

This tutorial will show an example of usage of the RTAB-Map library in C++ for RGB-D mapping.

Details

Project configuration (example with CMake)

Here is our project directory:

MyProject
|
-->CMakeLists.txt
-->main.cpp
-->MapBuilder.h

Files:

In your CMakeLists.txt:

# Minimal CMake project for the RTAB-Map RGB-D mapping example.
cmake_minimum_required(VERSION 2.8)
project(MyProject)

# Required dependencies.
find_package(RTABMap REQUIRED)
find_package(OpenCV REQUIRED)
find_package(PCL 1.7 REQUIRED)

# Use Qt4 or Qt5 (selectable from the CMake cache).
set(MY_QT_VERSION 4 CACHE STRING "Which Qt version to use")
if("${MY_QT_VERSION}" STREQUAL "4")
   find_package(Qt4 COMPONENTS QtCore QtGui QtSvg)
else()
   find_package(Qt5 COMPONENTS Widgets Core Gui Svg)
endif()

# Header search paths from all dependencies.
set(INCLUDE_DIRS
   ${RTABMap_INCLUDE_DIRS}
   ${OpenCV_INCLUDE_DIRS}
   ${PCL_INCLUDE_DIRS}
)

# Qt4 configures its include dirs/definitions through a "use file".
if("${MY_QT_VERSION}" STREQUAL "4")
   include(${QT_USE_FILE})
endif()

include_directories(${INCLUDE_DIRS})

# Libraries to link the example against.
set(LIBRARIES
   ${RTABMap_LIBRARIES}
   ${OpenCV_LIBRARIES}
   ${QT_LIBRARIES}
   ${PCL_LIBRARIES}
)

# Run Qt's meta-object compiler on headers declaring Q_OBJECT classes.
set(qt_headers
   MapBuilder.h
)
if("${MY_QT_VERSION}" STREQUAL "4")
   qt4_wrap_cpp(moc_srcs ${qt_headers})
else()
   qt5_wrap_cpp(moc_srcs ${qt_headers})
endif()

# The example executable.
add_executable(rgbd_example main.cpp ${moc_srcs})
target_link_libraries(rgbd_example ${LIBRARIES})

Code

The main.cpp

#include <rtabmap/core/OdometryF2M.h>
#include "rtabmap/core/Rtabmap.h"
#include "rtabmap/core/RtabmapThread.h"
#include "rtabmap/core/CameraRGBD.h"
#include "rtabmap/core/CameraThread.h"
#include "rtabmap/core/OdometryThread.h"
#include "rtabmap/core/Graph.h"
#include "rtabmap/utilite/UEventsManager.h"
#include <QApplication>
#include <stdio.h>
#include <pcl/io/pcd_io.h>
#include <pcl/io/ply_io.h>

#include "MapBuilder.h"

// Print the command-line usage message, then terminate with exit code 1.
void showUsage()
{
   static const char * kUsage =
      "\nUsage:\n"
      "rtabmap-rgbd_mapping driver\n"
      "  driver       Driver number to use: 0=OpenNI-PCL, 1=OpenNI2, 2=Freenect, 3=OpenNI-CV, 4=OpenNI-CV-ASUS\n\n";
   printf("%s", kUsage);
   exit(1);
}

using namespace rtabmap;
// Entry point of the RGB-D mapping example.
// Pipeline of threads connected by events:
//   Camera -> "CameraEvent" -> OdometryThread -> "OdometryEvent"
//          -> RtabmapThread -> "RtabmapEvent" -> MapBuilder (GUI).
// On exit, saves the assembled 3D cloud (rtabmap_cloud.pcd) and the optimized
// trajectory (rtabmap_trajectory.txt).
int main(int argc, char * argv[])
{
   // Console logging, warnings and above only.
   ULogger::setType(ULogger::kTypeConsole);
   ULogger::setLevel(ULogger::kWarning);

   // Driver index (0-4) is taken from the last command-line argument.
   int driver = 0;
   if(argc < 2)
   {
      showUsage();
   }
   else
   {
      driver = atoi(argv[argc-1]);
      if(driver < 0 || driver > 4)
      {
         UERROR("driver should be between 0 and 4.");
         showUsage();
      }
   }

   // Here is the pipeline that we will use:
   // Camera -> "CameraEvent" -> OdometryThread -> "OdometryEvent" -> RtabmapThread -> "RtabmapEvent"

   // Create the camera, it will send a CameraEvent at the rate specified.
   // Set transform to camera so z is up, y is left and x going forward
   Camera * camera = 0;
   Transform opticalRotation(0,0,1,0, -1,0,0,0, 0,-1,0,0);
   if(driver == 1)
   {
      // OpenNI2 driver.
      if(!CameraOpenNI2::available())
      {
         UERROR("Not built with OpenNI2 support...");
         exit(-1);
      }
      camera = new CameraOpenNI2("", 0, opticalRotation);
   }
   else if(driver == 2)
   {
      // Freenect driver (Kinect).
      if(!CameraFreenect::available())
      {
         UERROR("Not built with Freenect support...");
         exit(-1);
      }
      camera = new CameraFreenect(0, 0, opticalRotation);
   }
   else if(driver == 3)
   {
      // OpenNI through OpenCV.
      if(!CameraOpenNICV::available())
      {
         UERROR("Not built with OpenNI from OpenCV support...");
         exit(-1);
      }
      camera = new CameraOpenNICV(false, 0, opticalRotation);
   }
   else if(driver == 4)
   {
      // OpenNI through OpenCV, ASUS variant (first argument true).
      if(!CameraOpenNICV::available())
      {
         UERROR("Not built with OpenNI from OpenCV support...");
         exit(-1);
      }
      camera = new CameraOpenNICV(true, 0, opticalRotation);
   }
   else
   {
      // Default driver 0: OpenNI through PCL.
      camera = new rtabmap::CameraOpenni("", 0, opticalRotation);
   }

   // NOTE(review): on init failure we only log and continue — presumably so
   // the GUI still opens; confirm this is intended rather than aborting here.
   if(!camera->init())
   {
      UERROR("Camera init failed!");
   }

   // Thread wrapping the camera; it publishes CameraEvent.
   // (camera pointer assumed to be owned by the thread — TODO confirm)
   CameraThread cameraThread(camera);


   // GUI stuff, there the handler will receive RtabmapEvent and construct the map
   // We give it the camera so the GUI can pause/resume the camera
   QApplication app(argc, argv);
   MapBuilder mapBuilder(&cameraThread);

   // Create an odometry thread to process camera events, it will send OdometryEvent.
   OdometryThread odomThread(new OdometryF2M());


   // Create RTAB-Map to process OdometryEvent
   Rtabmap * rtabmap = new Rtabmap();
   rtabmap->init();
   RtabmapThread rtabmapThread(rtabmap); // ownership is transferred

   // Setup handlers
   odomThread.registerToEventsManager();
   rtabmapThread.registerToEventsManager();
   mapBuilder.registerToEventsManager();

   // The RTAB-Map is subscribed by default to CameraEvent, but we want
   // RTAB-Map to process OdometryEvent instead, ignoring the CameraEvent.
   // We can do that by creating a "pipe" between the camera and odometry, then
   // only the odometry will receive CameraEvent from that camera. RTAB-Map is
   // also subscribed to OdometryEvent by default, so no need to create a pipe between
   // odometry and RTAB-Map.
   UEventsManager::createPipe(&cameraThread, &odomThread, "CameraEvent");

   // Let's start the threads (downstream consumers first, producer last).
   rtabmapThread.start();
   odomThread.start();
   cameraThread.start();

   mapBuilder.show();
   app.exec(); // main loop; returns when the window is closed

   // remove handlers
   mapBuilder.unregisterFromEventsManager();
   rtabmapThread.unregisterFromEventsManager();
   odomThread.unregisterFromEventsManager();

   // Kill all threads (producer first so no new events are emitted)
   cameraThread.kill();
   odomThread.join(true);
   rtabmapThread.join(true);

   // Save 3D map. The rtabmap pointer is presumably still valid here:
   // rtabmapThread owns it and should delete it only in its own destructor
   // at the end of main — confirm against RtabmapThread's documentation.
   printf("Saving rtabmap_cloud.pcd...\n");
   std::map<int, Signature> nodes;
   std::map<int, Transform> optimizedPoses;
   std::multimap<int, Link> links;
   rtabmap->get3DMap(nodes, optimizedPoses, links, true, true);
   pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
   // Re-generate each node's local cloud and assemble them in the map frame.
   for(std::map<int, Transform>::iterator iter=optimizedPoses.begin(); iter!=optimizedPoses.end(); ++iter)
   {
      Signature node = nodes.find(iter->first)->second;

      // uncompress data
      node.sensorData().uncompressData();

      pcl::PointCloud<pcl::PointXYZRGB>::Ptr tmp = util3d::cloudRGBFromSensorData(
            node.sensorData(),
            4,           // image decimation before creating the clouds
            4.0f,        // maximum depth of the cloud
            0.01f);      // Voxel grid filtering
      *cloud += *util3d::transformPointCloud(tmp, iter->second); // transform the point cloud to its pose
   }
   if(cloud->size())
   {
      // Final voxel filter on the assembled cloud before saving.
      printf("Voxel grid filtering of the assembled cloud (voxel=%f, %d points)\n", 0.01f, (int)cloud->size());
      cloud = util3d::voxelize(cloud, 0.01f);

      printf("Saving rtabmap_cloud.pcd... done! (%d points)\n", (int)cloud->size());
      pcl::io::savePCDFile("rtabmap_cloud.pcd", *cloud);
      //pcl::io::savePLYFile("rtabmap_cloud.ply", *cloud); // to save in PLY format
   }
   else
   {
      printf("Saving rtabmap_cloud.pcd... failed! The cloud is empty.\n");
   }

   // Save trajectory (format id 0 — see graph::exportPoses for the meaning)
   printf("Saving rtabmap_trajectory.txt ...\n");
   if(optimizedPoses.size() && graph::exportPoses("rtabmap_trajectory.txt", 0, optimizedPoses, links))
   {
      printf("Saving rtabmap_trajectory.txt... done!\n");
   }
   else
   {
      printf("Saving rtabmap_trajectory.txt... failed!\n");
   }

   return 0;
}

The MapBuilder class (for visualization):

#ifndef MAPBUILDER_H_
#define MAPBUILDER_H_

#include <QVBoxLayout>
#include <QtCore/QMetaType>
#include <QAction>

#ifndef Q_MOC_RUN // Mac OS X issue
#include "rtabmap/gui/CloudViewer.h"
#include "rtabmap/core/util3d.h"
#include "rtabmap/core/util3d_filtering.h"
#include "rtabmap/core/util3d_transforms.h"
#include "rtabmap/core/RtabmapEvent.h"
#endif
#include "rtabmap/utilite/UStl.h"
#include "rtabmap/utilite/UConversion.h"
#include "rtabmap/utilite/UEventsHandler.h"
#include "rtabmap/utilite/ULogger.h"
#include "rtabmap/core/OdometryEvent.h"
#include "rtabmap/core/CameraThread.h"

using namespace rtabmap;

// This class receives RtabmapEvent and construct/update a 3D Map
// This class receives RtabmapEvent and construct/update a 3D Map.
// It is both a QWidget (the map window) and a UEventsHandler: events posted
// by the pipeline threads arrive in handleEvent() on the publishers' threads
// and are re-dispatched to the Qt thread via QMetaObject::invokeMethod().
class MapBuilder : public QWidget, public UEventsHandler
{
   Q_OBJECT
public:
   //Camera ownership is not transferred!
   MapBuilder(CameraThread * camera = 0) :
   	camera_(camera),
   	odometryCorrection_(Transform::getIdentity()),
   	processingStatistics_(false),
   	lastOdometryProcessed_(true)
   {
   	this->setWindowFlags(Qt::Dialog);
   	this->setWindowTitle(tr("3D Map"));
   	this->setMinimumWidth(800);
   	this->setMinimumHeight(600);

   	// 3D viewer widget, parented to this window, fills the layout.
   	cloudViewer_ = new CloudViewer(this);

   	QVBoxLayout *layout = new QVBoxLayout();
   	layout->addWidget(cloudViewer_);
   	this->setLayout(layout);

   	// Register event payload types so they can cross thread boundaries
   	// through queued invokeMethod() calls (see handleEvent()).
   	qRegisterMetaType<rtabmap::OdometryEvent>("rtabmap::OdometryEvent");
   	qRegisterMetaType<rtabmap::Statistics>("rtabmap::Statistics");

   	// Space bar toggles pause/resume of the camera (pauseDetection()).
   	QAction * pause = new QAction(this);
   	this->addAction(pause);
   	pause->setShortcut(Qt::Key_Space);
   	connect(pause, SIGNAL(triggered()), this, SLOT(pauseDetection()));
   }

   virtual ~MapBuilder()
   {
   	// Stop receiving events before Qt tears the widget down.
   	this->unregisterFromEventsManager();
   }

protected slots:
   // Toggle the camera thread: stop it if capturing, restart it otherwise.
   virtual void pauseDetection()
   {
   	UWARN("");
   	if(camera_)
   	{
   		if(camera_->isCapturing())
   		{
   			camera_->join(true);
   		}
   		else
   		{
   			camera_->start();
   		}
   	}
   }

   // Qt-thread handler for OdometryEvent: updates the live cloud and the
   // viewer's camera target. Invoked from handleEvent() via invokeMethod().
   virtual void processOdometry(const rtabmap::OdometryEvent & odom)
   {
   	if(!this->isVisible())
   	{
   		return;
   	}

   	Transform pose = odom.pose();
   	if(pose.isNull())
   	{
   		//Odometry lost: flag it with a red background and fall back to
   		//the last known pose.
   		cloudViewer_->setBackgroundColor(Qt::darkRed);

   		pose = lastOdomPose_;
   	}
   	else
   	{
   		cloudViewer_->setBackgroundColor(cloudViewer_->getDefaultBackgroundColor());
   	}
   	if(!pose.isNull())
   	{
   		lastOdomPose_ = pose;

   		// 3d cloud
   		// Only build a cloud when depth matches the RGB image size and a
   		// calibration (stereo or RGB-D) is available.
   		if(odom.data().depthOrRightRaw().cols == odom.data().imageRaw().cols &&
   		   odom.data().depthOrRightRaw().rows == odom.data().imageRaw().rows &&
   		   !odom.data().depthOrRightRaw().empty() &&
   		   (odom.data().stereoCameraModel().isValid() || odom.data().cameraModels().size()))
   		{
   			pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = util3d::cloudRGBFromSensorData(
   				odom.data(),
   				2,     // decimation
   				4.0f); // max depth
   			if(cloud->size())
   			{
   				if(!cloudViewer_->addOrUpdateCloud("cloudOdom", cloud, odometryCorrection_*pose))
   				{
   					UERROR("Adding cloudOdom to viewer failed!");
   				}
   			}
   			else
   			{
   				cloudViewer_->setCloudVisibility("cloudOdom", false);
   				UWARN("Empty cloudOdom!");
   			}
   		}

   		if(!odom.pose().isNull())
   		{
   			// update camera position
   			cloudViewer_->updateCameraTargetPosition(odometryCorrection_*odom.pose());
   		}
   	}
   	cloudViewer_->update();

   	// Allow handleEvent() to queue the next odometry event.
   	lastOdometryProcessed_ = true;
   }


   // Qt-thread handler for RTAB-Map statistics: refreshes the per-node clouds
   // and the pose graph in the viewer. Invoked from handleEvent().
   virtual void processStatistics(const rtabmap::Statistics & stats)
   {
   	// Guard read by handleEvent() to drop odometry events while busy here.
   	processingStatistics_ = true;

   	//============================
   	// Add RGB-D clouds
   	//============================
   	const std::map<int, Transform> & poses = stats.poses();
   	QMap<std::string, Transform> clouds = cloudViewer_->getAddedClouds();
   	for(std::map<int, Transform>::const_iterator iter = poses.begin(); iter!=poses.end(); ++iter)
   	{
   		if(!iter->second.isNull())
   		{
   			std::string cloudName = uFormat("cloud%d", iter->first);

   			// 3d point cloud
   			if(clouds.contains(cloudName))
   			{
   				// Update only if the pose has changed
   				Transform tCloud;
   				cloudViewer_->getPose(cloudName, tCloud);
   				if(tCloud.isNull() || iter->second != tCloud)
   				{
   					if(!cloudViewer_->updateCloudPose(cloudName, iter->second))
   					{
   						UERROR("Updating pose cloud %d failed!", iter->first);
   					}
   				}
   				cloudViewer_->setCloudVisibility(cloudName, true);
   			}
   			else if(uContains(stats.getSignatures(), iter->first))
   			{
   				Signature s = stats.getSignatures().at(iter->first);
   				s.sensorData().uncompressData(); // make sure data is uncompressed
   				// Add the new cloud
   				pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = util3d::cloudRGBFromSensorData(
   						s.sensorData(),
   					    4,     // decimation
   					    4.0f); // max depth
   				if(cloud->size())
   				{
   					if(!cloudViewer_->addOrUpdateCloud(cloudName, cloud, iter->second))
   					{
   						UERROR("Adding cloud %d to viewer failed!", iter->first);
   					}
   				}
   				else
   				{
   					UWARN("Empty cloud %d!", iter->first);
   				}
   			}
   		}
   		else
   		{
   			UWARN("Null pose for %d ?!?", iter->first);
   		}
   	}

   	//============================
   	// Add 3D graph (show all poses)
   	//============================
   	cloudViewer_->removeAllGraphs();
   	cloudViewer_->removeCloud("graph_nodes");
   	if(poses.size())
   	{
   		// Set graph
   		pcl::PointCloud<pcl::PointXYZ>::Ptr graph(new pcl::PointCloud<pcl::PointXYZ>);
   		pcl::PointCloud<pcl::PointXYZ>::Ptr graphNodes(new pcl::PointCloud<pcl::PointXYZ>);
   		for(std::map<int, Transform>::const_iterator iter=poses.begin(); iter!=poses.end(); ++iter)
   		{
   			graph->push_back(pcl::PointXYZ(iter->second.x(), iter->second.y(), iter->second.z()));
   		}
   		*graphNodes = *graph;


   		// add graph (gray links, green node points)
   		cloudViewer_->addOrUpdateGraph("graph", graph, Qt::gray);
   		cloudViewer_->addOrUpdateCloud("graph_nodes", graphNodes, Transform::getIdentity(), Qt::green);
   		cloudViewer_->setCloudPointSize("graph_nodes", 5);
   	}

   	// Remember the map correction so processOdometry() can place the live
   	// cloud consistently with the optimized map.
   	odometryCorrection_ = stats.mapCorrection();

   	cloudViewer_->update();

   	processingStatistics_ = false;
   }

   // Called by the events manager on the publisher's thread; forwards the
   // payloads to the Qt thread with queued invokeMethod() calls.
   virtual void handleEvent(UEvent * event)
   {
   	if(event->getClassName().compare("RtabmapEvent") == 0)
   	{
   		// NOTE(review): C-style downcast, guarded only by the class-name
   		// check above; a named cast would make the intent explicit.
   		RtabmapEvent * rtabmapEvent = (RtabmapEvent *)event;
   		const Statistics & stats = rtabmapEvent->getStats();
   		// Statistics must be processed in the Qt thread
   		if(this->isVisible())
   		{
   			QMetaObject::invokeMethod(this, "processStatistics", Q_ARG(rtabmap::Statistics, stats));
   		}
   	}
   	else if(event->getClassName().compare("OdometryEvent") == 0)
   	{
   		OdometryEvent * odomEvent = (OdometryEvent *)event;
   		// Odometry must be processed in the Qt thread
   		if(this->isVisible() &&
   		   lastOdometryProcessed_ &&
   		   !processingStatistics_)
   		{
   			lastOdometryProcessed_ = false; // if we receive too many odometry events!
   			QMetaObject::invokeMethod(this, "processOdometry", Q_ARG(rtabmap::OdometryEvent, *odomEvent));
   		}
   	}
   }

protected:
   CloudViewer * cloudViewer_;     // viewer widget, parented to this window
   CameraThread * camera_;         // not owned; may be null
   Transform lastOdomPose_;        // last valid odometry pose (presumably null until first valid pose — confirm Transform's default ctor)
   Transform odometryCorrection_;  // map correction applied to odometry poses
   bool processingStatistics_;     // true while processStatistics() runs
   bool lastOdometryProcessed_;    // throttle: next odometry queued only after previous is done
};


#endif /* MAPBUILDER_H_ */

Build and run

In your directory:

$ cmake .
$ make
$ ./rgbd_example

You should see a window with the 3D map constructing. If the background turns red, that means that odometry cannot be computed: visit the Lost Odometry section on this page for more information.

Example without threads and events

The previous approach based on threads and events is preferred. However, if you want to see step-by-step how images are processed in the different modules, it would be easier to remove threading stuff. So I provide an example in which I don't use any threads and events. The code is here and the dataset used can be downloaded here in the stereo tutorial. It is also built when RTAB-Map is built from source, the name is rtabmap-noEventsExample.

Example (with unzipped stereo_20Hz.zip images):

./rtabmap-noEventsExample 20 2 10 stereo_20hz stereo_20Hz stereo_20hz/left stereo_20hz/right

Clone this wiki locally