This commit is contained in:
2022-06-21 09:16:15 +03:00
commit 96f206fca0
228 changed files with 113598 additions and 0 deletions

View File

@@ -0,0 +1,69 @@
%YAML:1.0
#--------------------------------------------------------------------------------------------
# Camera Parameters. Adjust them!
#--------------------------------------------------------------------------------------------
# Camera calibration and distortion parameters (OpenCV)
Camera.fx: 535.4
Camera.fy: 539.2
Camera.cx: 320.1
Camera.cy: 247.6
Camera.k1: 0.0
Camera.k2: 0.0
Camera.p1: 0.0
Camera.p2: 0.0
Camera.width: 640
Camera.height: 480
# Camera frames per second
Camera.fps: 30.0
# IR projector baseline times fx (approx.)
Camera.bf: 40.0
# Color order of the images (0: BGR, 1: RGB. It is ignored if images are grayscale)
Camera.RGB: 1
# Close/Far threshold. Baseline times.
ThDepth: 40.0
# Depthmap values factor
DepthMapFactor: 1.0
#--------------------------------------------------------------------------------------------
# ORB Parameters
#--------------------------------------------------------------------------------------------
# ORB Extractor: Number of features per image
ORBextractor.nFeatures: 1000
# ORB Extractor: Scale factor between levels in the scale pyramid
ORBextractor.scaleFactor: 1.2
# ORB Extractor: Number of levels in the scale pyramid
ORBextractor.nLevels: 8
# ORB Extractor: Fast threshold
# Image is divided in a grid. At each cell FAST are extracted imposing a minimum response.
# Firstly we impose iniThFAST. If no corners are detected we impose a lower value minThFAST
# You can lower these values if your images have low contrast
ORBextractor.iniThFAST: 20
ORBextractor.minThFAST: 7
#--------------------------------------------------------------------------------------------
# Viewer Parameters
#--------------------------------------------------------------------------------------------
Viewer.KeyFrameSize: 0.05
Viewer.KeyFrameLineWidth: 1
Viewer.GraphLineWidth: 0.9
Viewer.PointSize: 2
Viewer.CameraSize: 0.08
Viewer.CameraLineWidth: 3
Viewer.ViewpointX: 0
Viewer.ViewpointY: -0.7
Viewer.ViewpointZ: -1.8
Viewer.ViewpointF: 500

View File

@@ -0,0 +1,97 @@
cmake_minimum_required(VERSION 2.4.6)
include($ENV{ROS_ROOT}/core/rosbuild/rosbuild.cmake)

rosbuild_init()

# Default to an optimized Release build unless the caller overrides it.
IF(NOT ROS_BUILD_TYPE)
  SET(ROS_BUILD_TYPE Release)
ENDIF()

MESSAGE("Build type: " ${ROS_BUILD_TYPE})

# NOTE(review): -march=native ties the binaries to the build machine's CPU;
# drop it if the nodes must run on different hardware.
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -O3 -march=native ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -O3 -march=native")

# Check C++11 or C++0x support
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)
if(COMPILER_SUPPORTS_CXX11)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
  add_definitions(-DCOMPILEDWITHC11)
  message(STATUS "Using flag -std=c++11.")
elseif(COMPILER_SUPPORTS_CXX0X)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
  add_definitions(-DCOMPILEDWITHC0X)
  message(STATUS "Using flag -std=c++0x.")
else()
  message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.")
endif()

# cmake_modules lives three levels up, at the ORB_SLAM2 project root.
LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../../../cmake_modules)

# Prefer OpenCV 3.x; fall back to 2.4.3+.
find_package(OpenCV 3.0 QUIET)
if(NOT OpenCV_FOUND)
  find_package(OpenCV 2.4.3 QUIET)
  if(NOT OpenCV_FOUND)
    message(FATAL_ERROR "OpenCV > 2.4.3 not found.")
  endif()
endif()

find_package(Eigen3 3.1.0 REQUIRED)
find_package(Pangolin REQUIRED)

include_directories(
${PROJECT_SOURCE_DIR}
${PROJECT_SOURCE_DIR}/../../../
${PROJECT_SOURCE_DIR}/../../../include
${Pangolin_INCLUDE_DIRS}
)

# Shared link set for all nodes: OpenCV/Eigen/Pangolin plus the prebuilt
# DBoW2, g2o and ORB_SLAM2 shared libraries from the project tree.
set(LIBS
${OpenCV_LIBS}
${EIGEN3_LIBS}
${Pangolin_LIBRARIES}
${PROJECT_SOURCE_DIR}/../../../Thirdparty/DBoW2/lib/libDBoW2.so
${PROJECT_SOURCE_DIR}/../../../Thirdparty/g2o/lib/libg2o.so
${PROJECT_SOURCE_DIR}/../../../lib/libORB_SLAM2.so
)

# Node for monocular camera
rosbuild_add_executable(Mono
src/ros_mono.cc
)

target_link_libraries(Mono
${LIBS}
)

# Node for monocular camera (Augmented Reality Demo)
rosbuild_add_executable(MonoAR
src/AR/ros_mono_ar.cc
src/AR/ViewerAR.h
src/AR/ViewerAR.cc
)

target_link_libraries(MonoAR
${LIBS}
)

# Node for stereo camera
rosbuild_add_executable(Stereo
src/ros_stereo.cc
)

target_link_libraries(Stereo
${LIBS}
)

# Node for RGB-D camera
rosbuild_add_executable(RGBD
src/ros_rgbd.cc
)

target_link_libraries(RGBD
${LIBS}
)

View File

@@ -0,0 +1,19 @@
<package>
<description brief="ORB_SLAM2">
ORB_SLAM2
</description>
<author>Raul Mur-Artal</author>
<license>GPLv3</license>
<depend package="roscpp"/>
<depend package="tf"/>
<depend package="sensor_msgs"/>
<depend package="image_transport"/>
<depend package="cv_bridge"/>
</package>

View File

@@ -0,0 +1,642 @@
/**
* This file is part of ORB-SLAM2.
*
* Copyright (C) 2014-2016 Raúl Mur-Artal <raulmur at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/raulmur/ORB_SLAM2>
*
* ORB-SLAM2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
*/
#include "ViewerAR.h"
#include <opencv2/highgui/highgui.hpp>
#include <mutex>
#include <thread>
#include <cstdlib>
using namespace std;
namespace ORB_SLAM2
{
const float eps = 1e-4;

// Exponential map for so(3): converts the axis-angle vector (x,y,z) into a
// 3x3 rotation matrix via Rodrigues' formula. For very small angles a
// second-order Taylor expansion is used to avoid dividing by ~0.
cv::Mat ExpSO3(const float &x, const float &y, const float &z)
{
    const float d2 = x*x+y*y+z*z;   // squared rotation angle
    const float d = sqrt(d2);
    // Skew-symmetric matrix [w]_x of the axis-angle vector.
    cv::Mat W = (cv::Mat_<float>(3,3) <<   0, -z,  y,
                                           z,  0, -x,
                                          -y,  x,  0);
    cv::Mat I = cv::Mat::eye(3,3,CV_32F);
    if(d<eps)
    {
        // Small angle: I + W + W^2/2.
        return (I + W + 0.5f*W*W);
    }
    // Rodrigues: I + W*sin(d)/d + W^2*(1-cos(d))/d^2.
    return (I + W*sin(d)/d + W*W*(1.0f-cos(d))/d2);
}
// Convenience overload: unpacks a 3x1 float vector and delegates to the
// scalar version of ExpSO3.
cv::Mat ExpSO3(const cv::Mat &v)
{
    const float wx = v.at<float>(0);
    const float wy = v.at<float>(1);
    const float wz = v.at<float>(2);
    return ExpSO3(wx,wy,wz);
}
// Default constructor: members are configured later via SetFPS,
// SetCameraCalibration and SetSLAM before Run() is started.
ViewerAR::ViewerAR(){}
// Main viewer loop (intended to run in its own thread).
// Waits for the first frame to learn the image size, creates the Pangolin
// window and UI panel, then repeatedly draws the camera image, tracked
// points, and virtual cubes/grids anchored on user-inserted planes.
void ViewerAR::Run()
{
    int w,h,wui;

    cv::Mat im, Tcw;
    int status;
    vector<cv::KeyPoint> vKeys;
    vector<MapPoint*> vMPs;

    // Block until the SLAM thread publishes the first image so the window
    // can be sized to match it.
    while(1)
    {
        GetImagePose(im,Tcw,status,vKeys,vMPs);
        if(im.empty())
            cv::waitKey(mT);
        else
        {
            w = im.cols;
            h = im.rows;
            break;
        }
    }

    wui=200;   // width of the UI side panel, in pixels

    pangolin::CreateWindowAndBind("Viewer",w+wui,h);

    glEnable(GL_DEPTH_TEST);
    glEnable (GL_BLEND);

    // UI panel and its toggles/sliders.
    pangolin::CreatePanel("menu").SetBounds(0.0,1.0,0.0,pangolin::Attach::Pix(wui));
    pangolin::Var<bool> menu_detectplane("menu.Insert Cube",false,false);
    pangolin::Var<bool> menu_clear("menu.Clear All",false,false);
    pangolin::Var<bool> menu_drawim("menu.Draw Image",true,true);
    pangolin::Var<bool> menu_drawcube("menu.Draw Cube",true,true);
    pangolin::Var<float> menu_cubesize("menu. Cube Size",0.05,0.01,0.3);
    pangolin::Var<bool> menu_drawgrid("menu.Draw Grid",true,true);
    pangolin::Var<int> menu_ngrid("menu. Grid Elements",3,1,10);
    pangolin::Var<float> menu_sizegrid("menu. Element Size",0.05,0.01,0.3);
    pangolin::Var<bool> menu_drawpoints("menu.Draw Points",false,true);

    pangolin::Var<bool> menu_LocalizationMode("menu.Localization Mode",false,true);
    bool bLocalizationMode = false;

    // Viewport for the camera image, to the right of the UI panel,
    // keeping the image aspect ratio.
    pangolin::View& d_image = pangolin::Display("image")
        .SetBounds(0,1.0f,pangolin::Attach::Pix(wui),1.0f,(float)w/h)
        .SetLock(pangolin::LockLeft, pangolin::LockTop);

    pangolin::GlTexture imageTexture(w,h,GL_RGB,false,0,GL_RGB,GL_UNSIGNED_BYTE);

    // OpenGL projection built from the camera intrinsics so virtual
    // geometry lines up with the real image.
    pangolin::OpenGlMatrixSpec P = pangolin::ProjectionMatrixRDF_TopLeft(w,h,fx,fy,cx,cy,0.001,1000);

    vector<Plane*> vpPlane;   // owning pointers; deleted on "Clear All"

    while(1)
    {
        // Sync the localization-mode toggle with the SLAM system.
        if(menu_LocalizationMode && !bLocalizationMode)
        {
            mpSystem->ActivateLocalizationMode();
            bLocalizationMode = true;
        }
        else if(!menu_LocalizationMode && bLocalizationMode)
        {
            mpSystem->DeactivateLocalizationMode();
            bLocalizationMode = false;
        }

        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // Activate camera view
        d_image.Activate();
        glColor3f(1.0,1.0,1.0);

        // Get last image and its computed pose from SLAM
        GetImagePose(im,Tcw,status,vKeys,vMPs);

        // Add text to image
        PrintStatus(status,bLocalizationMode,im);

        if(menu_drawpoints)
            DrawTrackedPoints(vKeys,vMPs,im);

        // Draw image
        if(menu_drawim)
            DrawImageTexture(imageTexture,im);

        // Clear depth so 3D overlays are not occluded by the background image.
        glClear(GL_DEPTH_BUFFER_BIT);

        // Load camera projection
        glMatrixMode(GL_PROJECTION);
        P.Load();

        glMatrixMode(GL_MODELVIEW);

        // Load camera pose
        LoadCameraPose(Tcw);

        // Draw virtual things only while tracking is OK (status 2).
        if(status==2)
        {
            // "Clear All" button: delete every inserted plane/cube.
            if(menu_clear)
            {
                if(!vpPlane.empty())
                {
                    for(size_t i=0; i<vpPlane.size(); i++)
                    {
                        delete vpPlane[i];
                    }
                    vpPlane.clear();
                    cout << "All cubes erased!" << endl;
                }
                menu_clear = false;
            }
            // "Insert Cube" button: fit a plane to the tracked points.
            if(menu_detectplane)
            {
                Plane* pPlane = DetectPlane(Tcw,vMPs,50);
                if(pPlane)
                {
                    cout << "New virtual cube inserted!" << endl;
                    vpPlane.push_back(pPlane);
                }
                else
                {
                    cout << "No plane detected. Point the camera to a planar region." << endl;
                }
                menu_detectplane = false;
            }

            if(!vpPlane.empty())
            {
                // Recompute plane if there has been a loop closure or global BA
                // In localization mode, map is not updated so we do not need to recompute
                bool bRecompute = false;
                if(!bLocalizationMode)
                {
                    if(mpSystem->MapChanged())
                    {
                        cout << "Map changed. All virtual elements are recomputed!" << endl;
                        bRecompute = true;
                    }
                }

                // Draw each plane's grid and cube in its own local frame.
                for(size_t i=0; i<vpPlane.size(); i++)
                {
                    Plane* pPlane = vpPlane[i];

                    if(pPlane)
                    {
                        if(bRecompute)
                        {
                            pPlane->Recompute();
                        }
                        glPushMatrix();
                        pPlane->glTpw.Multiply();

                        // Draw cube
                        if(menu_drawcube)
                        {
                            DrawCube(menu_cubesize);
                        }

                        // Draw grid plane
                        if(menu_drawgrid)
                        {
                            DrawPlane(menu_ngrid,menu_sizegrid);
                        }

                        glPopMatrix();
                    }
                }
            }
        }

        pangolin::FinishFrame();
        // Throttle to roughly the camera frame rate (mT is the period in ms).
        usleep(mT*1000);
    }
}
// Called by the tracking thread: publishes the latest frame, camera pose,
// tracking status and keypoint/map-point associations to the viewer.
// Thread-safe via mMutexPoseImage.
void ViewerAR::SetImagePose(const cv::Mat &im, const cv::Mat &Tcw, const int &status, const vector<cv::KeyPoint> &vKeys, const vector<ORB_SLAM2::MapPoint*> &vMPs)
{
    unique_lock<mutex> lock(mMutexPoseImage);
    mImage = im.clone();   // deep copies so the caller may reuse its buffers
    mTcw = Tcw.clone();
    mStatus = status;
    mvKeys = vKeys;
    mvMPs = vMPs;
}
// Called by the viewer thread: retrieves a consistent snapshot of the
// latest frame, pose, status and tracked features. Thread-safe via
// mMutexPoseImage; outputs are deep copies/independent vectors.
void ViewerAR::GetImagePose(cv::Mat &im, cv::Mat &Tcw, int &status, std::vector<cv::KeyPoint> &vKeys, std::vector<MapPoint*> &vMPs)
{
    unique_lock<mutex> lock(mMutexPoseImage);
    im = mImage.clone();
    Tcw = mTcw.clone();
    status = mStatus;
    vKeys = mvKeys;
    vMPs = mvMPs;
}
// Loads the 4x4 camera pose Tcw as the current OpenGL modelview matrix.
// Does nothing when no pose is available yet (empty matrix).
void ViewerAR::LoadCameraPose(const cv::Mat &Tcw)
{
    if(Tcw.empty())
        return;

    // Pack the row-major cv::Mat into Pangolin's column-major OpenGL
    // matrix: M.m[4*col+row]. The bottom row is (0,0,0,1).
    pangolin::OpenGlMatrix M;
    for(int col = 0; col < 4; col++)
    {
        for(int row = 0; row < 3; row++)
            M.m[4*col+row] = Tcw.at<float>(row,col);
        M.m[4*col+3] = (col==3) ? 1.0 : 0.0;
    }
    M.Load();
}
// Overlays a status banner on the image. status: 1 = not initialized,
// 2 = tracking OK, 3 = lost. The "OK"/"LOST" labels change depending on
// whether localization-only mode is active.
void ViewerAR::PrintStatus(const int &status, const bool &bLocMode, cv::Mat &im)
{
    if(status==1)
        AddTextToImage("SLAM NOT INITIALIZED",im,255,0,0);        // red
    else if(status==2)
        AddTextToImage(bLocMode ? "LOCALIZATION ON" : "SLAM ON",im,0,255,0);   // green
    else if(status==3)
        AddTextToImage(bLocMode ? "LOCALIZATION LOST" : "SLAM LOST",im,255,0,0); // red
}
void ViewerAR::AddTextToImage(const string &s, cv::Mat &im, const int r, const int g, const int b)
{
int l = 10;
//imText.rowRange(im.rows-imText.rows,imText.rows) = cv::Mat::zeros(textSize.height+10,im.cols,im.type());
cv::putText(im,s,cv::Point(l,im.rows-l),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l-1,im.rows-l),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l+1,im.rows-l),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l-1,im.rows-(l-1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l,im.rows-(l-1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l+1,im.rows-(l-1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l-1,im.rows-(l+1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l,im.rows-(l+1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l+1,im.rows-(l+1)),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(255,255,255),2,8);
cv::putText(im,s,cv::Point(l,im.rows-l),cv::FONT_HERSHEY_PLAIN,1.5,cv::Scalar(r,g,b),2,8);
}
// Uploads the RGB frame to the texture and renders it as the viewport
// background. Flipped vertically because image rows run top-down while
// OpenGL viewport coordinates run bottom-up.
void ViewerAR::DrawImageTexture(pangolin::GlTexture &imageTexture, cv::Mat &im)
{
    if(im.empty())
        return;   // nothing to draw until the first frame arrives

    imageTexture.Upload(im.data,GL_RGB,GL_UNSIGNED_BYTE);
    imageTexture.RenderToViewportFlipY();
}
void ViewerAR::DrawCube(const float &size,const float x, const float y, const float z)
{
pangolin::OpenGlMatrix M = pangolin::OpenGlMatrix::Translate(-x,-size-y,-z);
glPushMatrix();
M.Multiply();
pangolin::glDrawColouredCube(-size,size);
glPopMatrix();
}
// Draws pPlane's grid in world coordinates: applies the plane's world
// transform, delegates to the local-frame overload, then restores the
// matrix stack.
void ViewerAR::DrawPlane(Plane *pPlane, int ndivs, float ndivsize)
{
    glPushMatrix();
    pPlane->glTpw.Multiply();
    DrawPlane(ndivs,ndivsize);
    glPopMatrix();
}
// Draws a square wire grid in the plane's local frame. The plane is
// parallel to x-z at the origin with normal -y; the grid spans
// [-ndivs*ndivsize, ndivs*ndivsize] on both axes with 2*ndivs+1 lines
// per direction.
void ViewerAR::DrawPlane(int ndivs, float ndivsize)
{
    const float half = ndivs*ndivsize;   // grid half-extent

    glLineWidth(2);
    glColor3f(0.7f,0.7f,1.0f);
    glBegin(GL_LINES);

    for(int n = 0; n<=2*ndivs; n++)
    {
        const float t = -half + ndivsize*n;
        // Line parallel to z at x = t.
        glVertex3f(t,0,-half);
        glVertex3f(t,0,half);
        // Line parallel to x at z = t.
        glVertex3f(-half,0,t);
        glVertex3f(half,0,t);
    }

    glEnd();
}
// Overlays a small green dot on every keypoint that has an associated map
// point. vKeys and vMPs are expected to be parallel vectors; the loop is
// bounded by the smaller of the two sizes so a mismatch can never index
// vMPs out of bounds (the original used vKeys.size() for both).
void ViewerAR::DrawTrackedPoints(const std::vector<cv::KeyPoint> &vKeys, const std::vector<MapPoint *> &vMPs, cv::Mat &im)
{
    const size_t N = (vKeys.size() < vMPs.size()) ? vKeys.size() : vMPs.size();
    for(size_t i=0; i<N; i++)
    {
        if(vMPs[i])   // NULL means the keypoint has no map point
        {
            cv::circle(im,vKeys[i].pt,1,cv::Scalar(0,255,0),-1);
        }
    }
}
// Fits a plane to the currently tracked map points using a simple RANSAC.
// Returns a new heap-allocated Plane (caller owns it), or NULL when fewer
// than 50 well-observed points are available. Tcw is the current camera
// pose, stored in the Plane to orient its normal.
Plane* ViewerAR::DetectPlane(const cv::Mat Tcw, const std::vector<MapPoint*> &vMPs, const int iterations)
{
    // Retrieve 3D points
    vector<cv::Mat> vPoints;
    vPoints.reserve(vMPs.size());
    vector<MapPoint*> vPointMP;
    vPointMP.reserve(vMPs.size());

    // Keep only well-constrained points (observed from more than 5 keyframes).
    for(size_t i=0; i<vMPs.size(); i++)
    {
        MapPoint* pMP=vMPs[i];
        if(pMP)
        {
            if(pMP->Observations()>5)
            {
                vPoints.push_back(pMP->GetWorldPos());
                vPointMP.push_back(pMP);
            }
        }
    }

    const int N = vPoints.size();

    if(N<50)
        return NULL;

    // Indices for minimum set selection
    vector<size_t> vAllIndices;
    vAllIndices.reserve(N);
    vector<size_t> vAvailableIndices;

    for(int i=0; i<N; i++)
    {
        vAllIndices.push_back(i);
    }

    float bestDist = 1e10;
    vector<float> bestvDist;

    //RANSAC
    for(int n=0; n<iterations; n++)
    {
        vAvailableIndices = vAllIndices;

        // Homogeneous system: each row is [x y z 1].
        cv::Mat A(3,4,CV_32F);
        A.col(3) = cv::Mat::ones(3,1,CV_32F);

        // Get min set of points: three distinct random points define a candidate plane.
        for(short i = 0; i < 3; ++i)
        {
            int randi = DUtils::Random::RandomInt(0, vAvailableIndices.size()-1);

            int idx = vAvailableIndices[randi];

            A.row(i).colRange(0,3) = vPoints[idx].t();

            // Swap-and-pop so the same point cannot be drawn twice.
            vAvailableIndices[randi] = vAvailableIndices.back();
            vAvailableIndices.pop_back();
        }

        // Plane coefficients [a b c d] = right singular vector of the
        // smallest singular value (homogeneous least squares).
        cv::Mat u,w,vt;
        cv::SVDecomp(A,w,u,vt,cv::SVD::MODIFY_A | cv::SVD::FULL_UV);

        const float a = vt.at<float>(3,0);
        const float b = vt.at<float>(3,1);
        const float c = vt.at<float>(3,2);
        const float d = vt.at<float>(3,3);

        vector<float> vDistances(N,0);

        // NOTE(review): the normalization includes d, so these are not exact
        // point-to-plane distances; scales differ between candidates — confirm
        // this is intentional before comparing medianDist across iterations.
        const float f = 1.0f/sqrt(a*a+b*b+c*c+d*d);

        for(int i=0; i<N; i++)
        {
            vDistances[i] = fabs(vPoints[i].at<float>(0)*a+vPoints[i].at<float>(1)*b+vPoints[i].at<float>(2)*c+d)*f;
        }

        vector<float> vSorted = vDistances;
        sort(vSorted.begin(),vSorted.end());

        // Score the candidate by a low quantile (the max of the 20% mark and
        // index 20) — robust to outliers. N>=50 guarantees nth is in range.
        int nth = max((int)(0.2*N),20);
        const float medianDist = vSorted[nth];

        if(medianDist<bestDist)
        {
            bestDist = medianDist;
            bestvDist = vDistances;
        }
    }

    // Compute threshold inlier/outlier
    const float th = 1.4*bestDist;
    vector<bool> vbInliers(N,false);
    int nInliers = 0;
    for(int i=0; i<N; i++)
    {
        if(bestvDist[i]<th)
        {
            nInliers++;
            vbInliers[i]=true;
        }
    }

    // Collect the inlier map points in order.
    vector<MapPoint*> vInlierMPs(nInliers,NULL);
    int nin = 0;
    for(int i=0; i<N; i++)
    {
        if(vbInliers[i])
        {
            vInlierMPs[nin] = vPointMP[i];
            nin++;
        }
    }

    // Caller takes ownership of the returned Plane.
    return new Plane(vInlierMPs,Tcw);
}
// Builds a plane from a set of inlier map points and the camera pose at
// detection time (used to orient the normal towards the camera).
Plane::Plane(const std::vector<MapPoint *> &vMPs, const cv::Mat &Tcw):mvMPs(vMPs),mTcw(Tcw.clone())
{
    // Random in-plane rotation in (-pi/2, pi/2) so inserted cubes do not
    // all share the same orientation.
    rang = -3.14f/2+((float)rand()/RAND_MAX)*3.14f;
    Recompute();
}
// Re-fits the plane to its map points (homogeneous least squares over all
// non-bad points), recomputes the centroid `o`, orients the normal towards
// the camera that first observed the plane, and rebuilds Tpw / glTpw.
void Plane::Recompute()
{
    const int N = mvMPs.size();

    // Recompute plane with all points
    cv::Mat A = cv::Mat(N,4,CV_32F);
    A.col(3) = cv::Mat::ones(N,1,CV_32F);

    o = cv::Mat::zeros(3,1,CV_32F);

    int nPoints = 0;
    for(int i=0; i<N; i++)
    {
        MapPoint* pMP = mvMPs[i];
        if(!pMP->isBad())
        {
            cv::Mat Xw = pMP->GetWorldPos();
            o+=Xw;   // accumulate for the centroid
            A.row(nPoints).colRange(0,3) = Xw.t();
            nPoints++;
        }
    }
    A.resize(nPoints);   // drop rows reserved for points that turned out bad

    // Plane coefficients = right singular vector of the smallest singular value.
    cv::Mat u,w,vt;
    cv::SVDecomp(A,w,u,vt,cv::SVD::MODIFY_A | cv::SVD::FULL_UV);

    float a = vt.at<float>(3,0);
    float b = vt.at<float>(3,1);
    float c = vt.at<float>(3,2);

    o = o*(1.0f/nPoints);   // centroid of the good points
    const float f = 1.0f/sqrt(a*a+b*b+c*c);

    // Compute XC just the first time
    if(XC.empty())
    {
        // Camera center in world coordinates: Oc = -Rcw^T * tcw.
        cv::Mat Oc = -mTcw.colRange(0,3).rowRange(0,3).t()*mTcw.rowRange(0,3).col(3);
        XC = Oc-o;
    }

    // Flip the normal so it points towards the camera side of the plane.
    if((XC.at<float>(0)*a+XC.at<float>(1)*b+XC.at<float>(2)*c)>0)
    {
        a=-a;
        b=-b;
        c=-c;
    }

    const float nx = a*f;
    const float ny = b*f;
    const float nz = c*f;

    n = (cv::Mat_<float>(3,1)<<nx,ny,nz);

    // Rotation aligning +y ("up") with the plane normal, composed with the
    // stored random in-plane rotation `rang`.
    cv::Mat up = (cv::Mat_<float>(3,1) << 0.0f, 1.0f, 0.0f);

    cv::Mat v = up.cross(n);
    const float sa = cv::norm(v);
    const float ca = up.dot(n);
    const float ang = atan2(sa,ca);

    // NOTE(review): if the normal is exactly parallel to `up`, sa == 0 and
    // v*ang/sa divides by zero — confirm this cannot occur in practice.
    Tpw = cv::Mat::eye(4,4,CV_32F);

    Tpw.rowRange(0,3).colRange(0,3) = ExpSO3(v*ang/sa)*ExpSO3(up*rang);
    o.copyTo(Tpw.col(3).rowRange(0,3));

    // Column-major copy of Tpw for OpenGL (glTpw.m[4*col+row]).
    glTpw.m[0] = Tpw.at<float>(0,0);
    glTpw.m[1] = Tpw.at<float>(1,0);
    glTpw.m[2] = Tpw.at<float>(2,0);
    glTpw.m[3] = 0.0;

    glTpw.m[4] = Tpw.at<float>(0,1);
    glTpw.m[5] = Tpw.at<float>(1,1);
    glTpw.m[6] = Tpw.at<float>(2,1);
    glTpw.m[7] = 0.0;

    glTpw.m[8] = Tpw.at<float>(0,2);
    glTpw.m[9] = Tpw.at<float>(1,2);
    glTpw.m[10] = Tpw.at<float>(2,2);
    glTpw.m[11] = 0.0;

    glTpw.m[12] = Tpw.at<float>(0,3);
    glTpw.m[13] = Tpw.at<float>(1,3);
    glTpw.m[14] = Tpw.at<float>(2,3);
    glTpw.m[15] = 1.0;
}
// Builds a plane directly from a normal (nx,ny,nz) and origin (ox,oy,oz),
// with a random in-plane rotation, and fills Tpw / glTpw.
// Fixes vs. original: the random rotation is stored in the member `rang`
// (the original declared a shadowing local `const float rang`, leaving the
// member uninitialized for any later Recompute()), and a stray debug
// `cout << rang;` with no newline is removed.
Plane::Plane(const float &nx, const float &ny, const float &nz, const float &ox, const float &oy, const float &oz)
{
    n = (cv::Mat_<float>(3,1)<<nx,ny,nz);
    o = (cv::Mat_<float>(3,1)<<ox,oy,oz);

    // Rotation aligning +y ("up") with the normal.
    cv::Mat up = (cv::Mat_<float>(3,1) << 0.0f, 1.0f, 0.0f);

    cv::Mat v = up.cross(n);
    const float s = cv::norm(v);
    const float c = up.dot(n);
    const float a = atan2(s,c);
    Tpw = cv::Mat::eye(4,4,CV_32F);
    // Random in-plane rotation in (-pi/2, pi/2), stored in the member.
    rang = -3.14f/2+((float)rand()/RAND_MAX)*3.14f;
    Tpw.rowRange(0,3).colRange(0,3) = ExpSO3(v*a/s)*ExpSO3(up*rang);
    o.copyTo(Tpw.col(3).rowRange(0,3));

    // Column-major copy of Tpw for OpenGL (glTpw.m[4*col+row]).
    glTpw.m[0] = Tpw.at<float>(0,0);
    glTpw.m[1] = Tpw.at<float>(1,0);
    glTpw.m[2] = Tpw.at<float>(2,0);
    glTpw.m[3] = 0.0;

    glTpw.m[4] = Tpw.at<float>(0,1);
    glTpw.m[5] = Tpw.at<float>(1,1);
    glTpw.m[6] = Tpw.at<float>(2,1);
    glTpw.m[7] = 0.0;

    glTpw.m[8] = Tpw.at<float>(0,2);
    glTpw.m[9] = Tpw.at<float>(1,2);
    glTpw.m[10] = Tpw.at<float>(2,2);
    glTpw.m[11] = 0.0;

    glTpw.m[12] = Tpw.at<float>(0,3);
    glTpw.m[13] = Tpw.at<float>(1,3);
    glTpw.m[14] = Tpw.at<float>(2,3);
    glTpw.m[15] = 1.0;
}
}

View File

@@ -0,0 +1,120 @@
/**
* This file is part of ORB-SLAM2.
*
* Copyright (C) 2014-2016 Raúl Mur-Artal <raulmur at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/raulmur/ORB_SLAM2>
*
* ORB-SLAM2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef VIEWERAR_H
#define VIEWERAR_H
#include <mutex>
#include <opencv2/core/core.hpp>
#include <pangolin/pangolin.h>
#include <string>
#include"../../../include/System.h"
namespace ORB_SLAM2
{
// A 3D plane fitted to a set of map points, used as an anchor for virtual
// objects in the AR demo.
class Plane
{
public:
    // Fits a plane to the given map points; Tcw is the camera pose at
    // detection time (used to orient the normal towards the camera).
    Plane(const std::vector<MapPoint*> &vMPs, const cv::Mat &Tcw);
    // Builds a plane directly from a normal (nx,ny,nz) and origin (ox,oy,oz).
    Plane(const float &nx, const float &ny, const float &nz, const float &ox, const float &oy, const float &oz);
    // Re-fits the plane from the (possibly updated) map points and rebuilds
    // Tpw / glTpw.
    void Recompute();
    //normal
    cv::Mat n;
    //origin (centroid of the supporting points)
    cv::Mat o;
    //arbitrary orientation along normal
    float rang;
    //transformation from world to the plane
    cv::Mat Tpw;
    // same transform in OpenGL column-major layout, ready for glMultMatrix
    pangolin::OpenGlMatrix glTpw;
    //MapPoints that define the plane
    std::vector<MapPoint*> mvMPs;
    //camera pose when the plane was first observed (to compute normal direction)
    cv::Mat mTcw, XC;
};
// AR viewer: renders the live camera image with virtual cubes/grids
// anchored on planes detected among the tracked map points.
// Run() executes in its own thread; the tracking thread feeds frames via
// SetImagePose(), synchronized through mMutexPoseImage.
class ViewerAR
{
public:
    ViewerAR();

    // Sets the camera frame rate and derives the frame period mT (ms).
    void SetFPS(const float fps){
        mFPS = fps;
        mT=1e3/fps;
    }

    // Non-owning pointer to the SLAM system; must outlive this viewer.
    void SetSLAM(ORB_SLAM2::System* pSystem){
        mpSystem = pSystem;
    }

    // Main thread function.
    void Run();

    // Camera intrinsics used to build the OpenGL projection matrix.
    void SetCameraCalibration(const float &fx_, const float &fy_, const float &cx_, const float &cy_){
        fx = fx_; fy = fy_; cx = cx_; cy = cy_;
    }

    // Producer side: publish the latest frame/pose/status (tracking thread).
    void SetImagePose(const cv::Mat &im, const cv::Mat &Tcw, const int &status,
                      const std::vector<cv::KeyPoint> &vKeys, const std::vector<MapPoint*> &vMPs);

    // Consumer side: snapshot the latest frame/pose/status (viewer thread).
    void GetImagePose(cv::Mat &im, cv::Mat &Tcw, int &status,
                      std::vector<cv::KeyPoint> &vKeys, std::vector<MapPoint*> &vMPs);

private:
    //SLAM
    ORB_SLAM2::System* mpSystem;

    // Overlays the tracking-status banner on the image.
    void PrintStatus(const int &status, const bool &bLocMode, cv::Mat &im);
    // Draws text with a white halo near the bottom-left corner.
    void AddTextToImage(const std::string &s, cv::Mat &im, const int r=0, const int g=0, const int b=0);
    // Loads Tcw as the OpenGL modelview matrix.
    void LoadCameraPose(const cv::Mat &Tcw);
    // Renders the camera frame as the viewport background.
    void DrawImageTexture(pangolin::GlTexture &imageTexture, cv::Mat &im);
    // Draws a coloured cube resting on the plane.
    void DrawCube(const float &size, const float x=0, const float y=0, const float z=0);
    // Draws a wire grid in the plane's local frame / in world coordinates.
    void DrawPlane(int ndivs, float ndivsize);
    void DrawPlane(Plane* pPlane, int ndivs, float ndivsize);
    // Marks keypoints that have an associated map point.
    void DrawTrackedPoints(const std::vector<cv::KeyPoint> &vKeys, const std::vector<MapPoint*> &vMPs, cv::Mat &im);

    // RANSAC plane fit over the tracked map points; caller owns the result.
    Plane* DetectPlane(const cv::Mat Tcw, const std::vector<MapPoint*> &vMPs, const int iterations=50);

    // frame rate
    float mFPS, mT;
    float fx,fy,cx,cy;   // camera intrinsics

    // Last processed image and computed pose by the SLAM
    std::mutex mMutexPoseImage;
    cv::Mat mTcw;
    cv::Mat mImage;
    int mStatus;
    std::vector<cv::KeyPoint> mvKeys;
    std::vector<MapPoint*> mvMPs;

};
}
#endif // VIEWERAR_H

View File

@@ -0,0 +1,169 @@
/**
* This file is part of ORB-SLAM2.
*
* Copyright (C) 2014-2016 Raúl Mur-Artal <raulmur at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/raulmur/ORB_SLAM2>
*
* ORB-SLAM2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
*/
#include<iostream>
#include<algorithm>
#include<fstream>
#include<chrono>
#include<ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include<opencv2/core/core.hpp>
#include<opencv2/imgproc/imgproc.hpp>
#include"../../../include/System.h"
#include"ViewerAR.h"
using namespace std;
ORB_SLAM2::ViewerAR viewerAR;
bool bRGB = true;
cv::Mat K;
cv::Mat DistCoef;
// Bridges ROS image messages to the SLAM system for the AR demo.
class ImageGrabber
{
public:
    ImageGrabber(ORB_SLAM2::System* pSLAM):mpSLAM(pSLAM){}

    // Callback for incoming monocular frames.
    void GrabImage(const sensor_msgs::ImageConstPtr& msg);

    ORB_SLAM2::System* mpSLAM;   // non-owning; outlives this grabber
};
// AR demo entry point: starts monocular SLAM (without the built-in viewer),
// subscribes to the camera topic, reads calibration for undistortion, and
// runs the AR viewer in its own thread.
int main(int argc, char **argv)
{
    ros::init(argc, argv, "Mono");
    ros::start();

    if(argc != 3)
    {
        cerr << endl << "Usage: rosrun ORB_SLAM2 Mono path_to_vocabulary path_to_settings" << endl;
        ros::shutdown();
        return 1;
    }

    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    // Built-in viewer disabled (last arg false): the AR viewer replaces it.
    ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::MONOCULAR,false);

    cout << endl << endl;
    cout << "-----------------------" << endl;
    cout << "Augmented Reality Demo" << endl;
    cout << "1) Translate the camera to initialize SLAM." << endl;
    cout << "2) Look at a planar region and translate the camera." << endl;
    cout << "3) Press Insert Cube to place a virtual cube in the plane. " << endl;
    cout << endl;
    cout << "You can place several cubes in different planes." << endl;
    cout << "-----------------------" << endl;
    cout << endl;

    viewerAR.SetSLAM(&SLAM);

    ImageGrabber igb(&SLAM);

    ros::NodeHandle nodeHandler;
    ros::Subscriber sub = nodeHandler.subscribe("/camera/image_raw", 1, &ImageGrabber::GrabImage,&igb);

    // Read camera calibration so incoming frames can be undistorted for display.
    // NOTE(review): assumes the settings file opens — no fSettings.isOpened() check.
    cv::FileStorage fSettings(argv[2], cv::FileStorage::READ);
    bRGB = static_cast<bool>((int)fSettings["Camera.RGB"]);
    float fps = fSettings["Camera.fps"];
    viewerAR.SetFPS(fps);

    float fx = fSettings["Camera.fx"];
    float fy = fSettings["Camera.fy"];
    float cx = fSettings["Camera.cx"];
    float cy = fSettings["Camera.cy"];

    viewerAR.SetCameraCalibration(fx,fy,cx,cy);

    // Intrinsic matrix and distortion coefficients for cv::undistort.
    K = cv::Mat::eye(3,3,CV_32F);
    K.at<float>(0,0) = fx;
    K.at<float>(1,1) = fy;
    K.at<float>(0,2) = cx;
    K.at<float>(1,2) = cy;

    DistCoef = cv::Mat::zeros(4,1,CV_32F);
    DistCoef.at<float>(0) = fSettings["Camera.k1"];
    DistCoef.at<float>(1) = fSettings["Camera.k2"];
    DistCoef.at<float>(2) = fSettings["Camera.p1"];
    DistCoef.at<float>(3) = fSettings["Camera.p2"];
    const float k3 = fSettings["Camera.k3"];
    // Only extend to a 5-coefficient model when a third radial term is given.
    if(k3!=0)
    {
        DistCoef.resize(5);
        DistCoef.at<float>(4) = k3;
    }

    // Run the AR viewer in its own thread.
    // NOTE(review): std::thread is used but <thread> is not included in this
    // file — presumably pulled in transitively via System.h; confirm.
    thread tViewer = thread(&ORB_SLAM2::ViewerAR::Run,&viewerAR);

    ros::spin();

    // Stop all threads
    SLAM.Shutdown();

    // Save camera trajectory
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");

    ros::shutdown();

    return 0;
}
// Frame callback: tracks the frame with SLAM, undistorts it for display,
// and hands everything (pose, status, keypoints, map points) to the AR viewer.
void ImageGrabber::GrabImage(const sensor_msgs::ImageConstPtr& msg)
{
    // Copy the ros image message to cv::Mat.
    cv_bridge::CvImageConstPtr cv_ptr;
    try
    {
        cv_ptr = cv_bridge::toCvShare(msg);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    cv::Mat im = cv_ptr->image.clone();
    cv::Mat imu;
    // Track on the raw image; the timestamp comes from the ROS header.
    cv::Mat Tcw = mpSLAM->TrackMonocular(cv_ptr->image,cv_ptr->header.stamp.toSec());
    int state = mpSLAM->GetTrackingState();
    vector<ORB_SLAM2::MapPoint*> vMPs = mpSLAM->GetTrackedMapPoints();
    vector<cv::KeyPoint> vKeys = mpSLAM->GetTrackedKeyPointsUn();

    // Undistort the frame for display so it lines up with the undistorted keypoints.
    cv::undistort(im,imu,K,DistCoef);

    if(bRGB)
        viewerAR.SetImagePose(imu,Tcw,state,vKeys,vMPs);
    else
    {
        // Feed is not RGB: swap the R and B channels before display.
        cv::cvtColor(imu,imu,CV_RGB2BGR);
        viewerAR.SetImagePose(imu,Tcw,state,vKeys,vMPs);
    }
}

View File

@@ -0,0 +1,96 @@
/**
* This file is part of ORB-SLAM2.
*
* Copyright (C) 2014-2016 Raúl Mur-Artal <raulmur at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/raulmur/ORB_SLAM2>
*
* ORB-SLAM2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
*/
#include<iostream>
#include<algorithm>
#include<fstream>
#include<chrono>
#include<ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include<opencv2/core/core.hpp>
#include"../../../include/System.h"
using namespace std;
// Bridges ROS image messages to the monocular SLAM system.
class ImageGrabber
{
public:
    ImageGrabber(ORB_SLAM2::System* pSLAM):mpSLAM(pSLAM){}

    // Callback for incoming monocular frames.
    void GrabImage(const sensor_msgs::ImageConstPtr& msg);

    ORB_SLAM2::System* mpSLAM;   // non-owning; outlives this grabber
};
// Monocular node entry point: starts SLAM (with its built-in viewer),
// subscribes to the camera topic, spins until shutdown, then saves the
// keyframe trajectory.
int main(int argc, char **argv)
{
    ros::init(argc, argv, "Mono");
    ros::start();

    // Expect exactly: vocabulary path + settings path.
    if(argc != 3)
    {
        cerr << endl << "Usage: rosrun ORB_SLAM2 Mono path_to_vocabulary path_to_settings" << endl;
        ros::shutdown();
        return 1;
    }

    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::MONOCULAR,true);

    ImageGrabber grabber(&SLAM);

    // Feed every incoming frame on /camera/image_raw to the tracker.
    ros::NodeHandle nh;
    ros::Subscriber imageSub = nh.subscribe("/camera/image_raw", 1, &ImageGrabber::GrabImage, &grabber);

    ros::spin();

    // Stop all threads
    SLAM.Shutdown();

    // Save camera trajectory
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");

    ros::shutdown();

    return 0;
}
// Frame callback: converts the ROS image to a cv::Mat view and hands it to
// the monocular tracker with the header timestamp.
void ImageGrabber::GrabImage(const sensor_msgs::ImageConstPtr& msg)
{
    // Copy the ros image message to cv::Mat.
    cv_bridge::CvImageConstPtr bridgeFrame;
    try
    {
        bridgeFrame = cv_bridge::toCvShare(msg);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    mpSLAM->TrackMonocular(bridgeFrame->image, bridgeFrame->header.stamp.toSec());
}

View File

@@ -0,0 +1,115 @@
/**
* This file is part of ORB-SLAM2.
*
* Copyright (C) 2014-2016 Raúl Mur-Artal <raulmur at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/raulmur/ORB_SLAM2>
*
* ORB-SLAM2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
*/
#include<iostream>
#include<algorithm>
#include<fstream>
#include<chrono>
#include<ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include<opencv2/core/core.hpp>
#include"../../../include/System.h"
using namespace std;
// Bridges synchronized RGB/depth ROS image pairs to the RGB-D SLAM system.
class ImageGrabber
{
public:
    ImageGrabber(ORB_SLAM2::System* pSLAM):mpSLAM(pSLAM){}

    // Callback for a time-synchronized RGB + registered-depth pair.
    void GrabRGBD(const sensor_msgs::ImageConstPtr& msgRGB,const sensor_msgs::ImageConstPtr& msgD);

    ORB_SLAM2::System* mpSLAM;   // non-owning; outlives this grabber
};
// RGB-D node entry point: starts SLAM, subscribes to approximately
// time-synchronized colour and registered-depth streams, spins until
// shutdown, then saves the keyframe trajectory.
int main(int argc, char **argv)
{
    ros::init(argc, argv, "RGBD");
    ros::start();

    if(argc != 3)
    {
        cerr << endl << "Usage: rosrun ORB_SLAM2 RGBD path_to_vocabulary path_to_settings" << endl;
        ros::shutdown();
        return 1;
    }

    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::RGBD,true);

    ImageGrabber igb(&SLAM);

    ros::NodeHandle nh;
    // Both topics as global names ("/camera/..."). The depth topic
    // previously lacked the leading slash, making it namespace-relative and
    // inconsistent with the RGB topic under non-root namespaces/remapping.
    message_filters::Subscriber<sensor_msgs::Image> rgb_sub(nh, "/camera/rgb/image_raw", 1);
    message_filters::Subscriber<sensor_msgs::Image> depth_sub(nh, "/camera/depth_registered/image_raw", 1);
    // Approximate-time policy pairs RGB/depth frames whose stamps differ slightly.
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> sync_pol;
    message_filters::Synchronizer<sync_pol> sync(sync_pol(10), rgb_sub,depth_sub);
    sync.registerCallback(boost::bind(&ImageGrabber::GrabRGBD,&igb,_1,_2));

    ros::spin();

    // Stop all threads
    SLAM.Shutdown();

    // Save camera trajectory
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");

    ros::shutdown();

    return 0;
}
// Synchronized-pair callback: converts both ROS images to cv::Mat views
// and hands them to the RGB-D tracker with the RGB frame's timestamp.
void ImageGrabber::GrabRGBD(const sensor_msgs::ImageConstPtr& msgRGB,const sensor_msgs::ImageConstPtr& msgD)
{
    // Copy the ros image message to cv::Mat.
    cv_bridge::CvImageConstPtr rgbFrame;
    cv_bridge::CvImageConstPtr depthFrame;
    try
    {
        rgbFrame = cv_bridge::toCvShare(msgRGB);
        depthFrame = cv_bridge::toCvShare(msgD);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    mpSLAM->TrackRGBD(rgbFrame->image,depthFrame->image,rgbFrame->header.stamp.toSec());
}

View File

@@ -0,0 +1,172 @@
/**
* This file is part of ORB-SLAM2.
*
* Copyright (C) 2014-2016 Raúl Mur-Artal <raulmur at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/raulmur/ORB_SLAM2>
*
* ORB-SLAM2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ORB-SLAM2. If not, see <http://www.gnu.org/licenses/>.
*/
#include<iostream>
#include<algorithm>
#include<fstream>
#include<chrono>
#include<ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include <message_filters/subscriber.h>
#include <message_filters/time_synchronizer.h>
#include <message_filters/sync_policies/approximate_time.h>
#include<opencv2/core/core.hpp>
#include"../../../include/System.h"
using namespace std;
// Bridges synchronized stereo image pairs from ROS into the ORB-SLAM2 front-end,
// optionally rectifying both images first with precomputed remap tables.
class ImageGrabber
{
public:
// pSLAM: non-owning pointer to an already-constructed SLAM system; it must outlive this grabber.
ImageGrabber(ORB_SLAM2::System* pSLAM):mpSLAM(pSLAM){}

// Synchronizer callback: (optionally rectifies and) tracks one left/right image pair.
void GrabStereo(const sensor_msgs::ImageConstPtr& msgLeft,const sensor_msgs::ImageConstPtr& msgRight);

ORB_SLAM2::System* mpSLAM; // non-owning; owned by main()
bool do_rectify; // set from argv[3] in main() before callbacks run
cv::Mat M1l,M2l,M1r,M2r; // cv::remap tables for left/right rectification (filled only when do_rectify)
};
/**
 * Entry point of the stereo ROS node.
 *
 * Usage: rosrun ORB_SLAM2 Stereo path_to_vocabulary path_to_settings do_rectify
 *
 * Subscribes to left/right image streams, pairs them by approximate timestamp,
 * optionally rectifies them using the calibration in the settings file, feeds
 * each pair to ORB-SLAM2, and saves the trajectories on shutdown.
 */
int main(int argc, char **argv)
{
    // Fix: the node was initialized as "RGBD" — a copy-paste from the RGB-D node.
    // Use the name the usage string advertises.
    ros::init(argc, argv, "Stereo");
    ros::start();

    if(argc != 4)
    {
        cerr << endl << "Usage: rosrun ORB_SLAM2 Stereo path_to_vocabulary path_to_settings do_rectify" << endl;
        ros::shutdown();
        return 1;
    }

    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::STEREO,true);

    ImageGrabber igb(&SLAM);

    // argv[3] must be the literal "true" or "false" (parsed with boolalpha).
    stringstream ss(argv[3]);
    ss >> boolalpha >> igb.do_rectify;

    if(igb.do_rectify)
    {
        // Load settings related to stereo calibration
        cv::FileStorage fsSettings(argv[2], cv::FileStorage::READ);
        if(!fsSettings.isOpened())
        {
            cerr << "ERROR: Wrong path to settings" << endl;
            return -1;
        }

        cv::Mat K_l, K_r, P_l, P_r, R_l, R_r, D_l, D_r;
        fsSettings["LEFT.K"] >> K_l;
        fsSettings["RIGHT.K"] >> K_r;

        fsSettings["LEFT.P"] >> P_l;
        fsSettings["RIGHT.P"] >> P_r;

        fsSettings["LEFT.R"] >> R_l;
        fsSettings["RIGHT.R"] >> R_r;

        fsSettings["LEFT.D"] >> D_l;
        fsSettings["RIGHT.D"] >> D_r;

        int rows_l = fsSettings["LEFT.height"];
        int cols_l = fsSettings["LEFT.width"];
        int rows_r = fsSettings["RIGHT.height"];
        int cols_r = fsSettings["RIGHT.width"];

        if(K_l.empty() || K_r.empty() || P_l.empty() || P_r.empty() || R_l.empty() || R_r.empty() || D_l.empty() || D_r.empty() ||
                rows_l==0 || rows_r==0 || cols_l==0 || cols_r==0)
        {
            cerr << "ERROR: Calibration parameters to rectify stereo are missing!" << endl;
            return -1;
        }

        // Precompute the undistort+rectify remap tables used per frame in GrabStereo.
        cv::initUndistortRectifyMap(K_l,D_l,R_l,P_l.rowRange(0,3).colRange(0,3),cv::Size(cols_l,rows_l),CV_32F,igb.M1l,igb.M2l);
        cv::initUndistortRectifyMap(K_r,D_r,R_r,P_r.rowRange(0,3).colRange(0,3),cv::Size(cols_r,rows_r),CV_32F,igb.M1r,igb.M2r);
    }

    ros::NodeHandle nh;

    // Both topics are given as absolute names. Fix: the right topic was missing its
    // leading '/', so it resolved relative to the node's namespace while the left
    // topic did not — under any non-root namespace the pair would never match up.
    message_filters::Subscriber<sensor_msgs::Image> left_sub(nh, "/camera/left/image_raw", 1);
    message_filters::Subscriber<sensor_msgs::Image> right_sub(nh, "/camera/right/image_raw", 1);

    // Pair left/right images with close timestamps (candidate queue of 10).
    typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> sync_pol;
    message_filters::Synchronizer<sync_pol> sync(sync_pol(10), left_sub, right_sub);
    sync.registerCallback(boost::bind(&ImageGrabber::GrabStereo, &igb, _1, _2));

    ros::spin();

    // Stop all threads
    SLAM.Shutdown();

    // Save camera trajectory
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory_TUM_Format.txt");
    SLAM.SaveTrajectoryTUM("FrameTrajectory_TUM_Format.txt");
    SLAM.SaveTrajectoryKITTI("FrameTrajectory_KITTI_Format.txt");

    ros::shutdown();

    return 0;
}
// Synchronizer callback: convert the left/right messages to cv::Mat views, rectify
// them if requested, and hand the pair to the SLAM system using the left stamp.
void ImageGrabber::GrabStereo(const sensor_msgs::ImageConstPtr& msgLeft,const sensor_msgs::ImageConstPtr& msgRight)
{
    // Obtain shared (no-copy where possible) cv::Mat views of both ROS images;
    // skip this frame entirely if either conversion fails.
    cv_bridge::CvImageConstPtr leftFrame;
    cv_bridge::CvImageConstPtr rightFrame;
    try
    {
        leftFrame = cv_bridge::toCvShare(msgLeft);
        rightFrame = cv_bridge::toCvShare(msgRight);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    const double stamp = leftFrame->header.stamp.toSec();

    if(!do_rectify)
    {
        mpSLAM->TrackStereo(leftFrame->image, rightFrame->image, stamp);
        return;
    }

    // Apply the rectification maps precomputed in main() before tracking.
    cv::Mat rectLeft, rectRight;
    cv::remap(leftFrame->image, rectLeft, M1l, M2l, cv::INTER_LINEAR);
    cv::remap(rightFrame->image, rectRight, M1r, M2r, cv::INTER_LINEAR);
    mpSLAM->TrackStereo(rectLeft, rectRight, stamp);
}