This commit is contained in:
PodmogilnyjIvan
2021-12-03 03:34:31 -08:00
commit ff4acf84be
542 changed files with 136810 additions and 0 deletions

134
include/Atlas.h Normal file
View File

@@ -0,0 +1,134 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef ATLAS_H
#define ATLAS_H
#include "Map.h"
#include "MapPoint.h"
#include "KeyFrame.h"
#include "GeometricCamera.h"
#include "Pinhole.h"
#include "KannalaBrandt8.h"
#include <set>
#include <mutex>
#include <boost/serialization/vector.hpp>
#include <boost/serialization/export.hpp>
namespace ORB_SLAM3
{
class Viewer;
class Map;
class MapPoint;
class KeyFrame;
class KeyFrameDatabase;
class Frame;
class KannalaBrandt8;
class Pinhole;
// Atlas: owns the collection of Maps built during a SLAM session, tracks
// the currently active map, and stores the camera models (Pinhole /
// KannalaBrandt8) shared by all maps. This header only declares the
// interface; definitions live in the corresponding .cc file.
class Atlas
{
public:
Atlas();
Atlas(int initKFid); // On construction the first map is created with this initial keyframe id
~Atlas();
// Create a fresh map and make it the current one.
void CreateNewMap();
// Switch the active map to pMap.
void ChangeMap(Map* pMap);
unsigned long int GetLastInitKFid();
void SetViewer(Viewer* pViewer);
// Methods for changing components in the current map
void AddKeyFrame(KeyFrame* pKF);
void AddMapPoint(MapPoint* pMP);
void AddCamera(GeometricCamera* pCam);
/* All methods without Map pointer work on current map */
void SetReferenceMapPoints(const std::vector<MapPoint*> &vpMPs);
void InformNewBigChange();
int GetLastBigChangeIdx();
long unsigned int MapPointsInMap();
long unsigned KeyFramesInMap();
// Methods for getting data from the current map
std::vector<KeyFrame*> GetAllKeyFrames();
std::vector<MapPoint*> GetAllMapPoints();
std::vector<MapPoint*> GetReferenceMapPoints();
vector<Map*> GetAllMaps();
int CountMaps();
void clearMap();
void clearAtlas();
Map* GetCurrentMap();
// Bad-map bookkeeping: maps flagged bad are collected in mspBadMaps.
void SetMapBad(Map* pMap);
void RemoveBadMaps();
// Inertial (IMU) related state of the current map
bool isInertial();
void SetInertialSensor();
void SetImuInitialized();
bool isImuInitialized();
void SetKeyFrameDababase(KeyFrameDatabase* pKFDB); // NOTE(review): "Dababase" typo kept — renaming would break existing callers
KeyFrameDatabase* GetKeyFrameDatabase();
void SetORBVocabulary(ORBVocabulary* pORBVoc);
ORBVocabulary* GetORBVocabulary();
long unsigned int GetNumLivedKF();
long unsigned int GetNumLivedMP();
protected:
std::set<Map*> mspMaps;    // maps managed by the atlas
std::set<Map*> mspBadMaps; // maps flagged bad via SetMapBad
Map* mpCurrentMap;         // active map; methods without a Map* operate on it
std::vector<GeometricCamera*> mvpCameras;
std::vector<KannalaBrandt8*> mvpBackupCamKan;
std::vector<Pinhole*> mvpBackupCamPin;
std::mutex mMutexAtlas;    // NOTE(review): presumably guards the containers above — confirm in Atlas.cc
unsigned long int mnLastInitKFidMap;
Viewer* mpViewer;
bool mHasViewer;
// Class references for the map reconstruction from the save file
KeyFrameDatabase* mpKeyFrameDB;
ORBVocabulary* mpORBVocabulary;
}; // class Atlas
} // namespace ORB_SLAM3
#endif // ATLAS_H

View File

@@ -0,0 +1,99 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef CAMERAMODELS_GEOMETRICCAMERA_H
#define CAMERAMODELS_GEOMETRICCAMERA_H
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <boost/serialization/serialization.hpp>
#include <boost/serialization/access.hpp>
#include <boost/serialization/base_object.hpp>
#include <boost/serialization/export.hpp>
#include <boost/serialization/vector.hpp>
#include <boost/serialization/assume_abstract.hpp>
#include <Eigen/Geometry>
namespace ORB_SLAM3 {
// Abstract base class for the camera models used by the system
// (implemented by Pinhole and KannalaBrandt8). Declares projection and
// unprojection, their Jacobians, two-view reconstruction and epipolar
// checks; all behaviour lives in the derived classes.
class GeometricCamera {
public:
    GeometricCamera() {}
    GeometricCamera(const std::vector<float> &_vParameters) : mvParameters(_vParameters) {}
    // FIX: destructor made virtual. Every other member function below is
    // pure virtual, so instances are only ever handled polymorphically;
    // deleting a derived camera through a GeometricCamera* with a
    // non-virtual destructor is undefined behaviour.
    virtual ~GeometricCamera() {}

    // Project a 3D point (camera coordinates) onto the image plane.
    virtual cv::Point2f project(const cv::Point3f &p3D) = 0;
    virtual cv::Point2f project(const cv::Matx31f &m3D) = 0;
    virtual cv::Point2f project(const cv::Mat& m3D) = 0;
    virtual Eigen::Vector2d project(const Eigen::Vector3d & v3D) = 0;
    virtual cv::Mat projectMat(const cv::Point3f& p3D) = 0;

    virtual float uncertainty2(const Eigen::Matrix<double,2,1> &p2D) = 0;

    // Back-project a pixel into 3D (camera coordinates).
    virtual cv::Point3f unproject(const cv::Point2f &p2D) = 0;
    virtual cv::Mat unprojectMat(const cv::Point2f &p2D) = 0;
    virtual cv::Matx31f unprojectMat_(const cv::Point2f &p2D) = 0;

    // Jacobians of projection / unprojection.
    virtual cv::Mat projectJac(const cv::Point3f &p3D) = 0;
    virtual Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D) = 0;
    virtual cv::Mat unprojectJac(const cv::Point2f &p2D) = 0;

    // Estimate the relative pose (R21, t21) and triangulated points from
    // two sets of matched keypoints.
    virtual bool ReconstructWithTwoViews(const std::vector<cv::KeyPoint>& vKeys1, const std::vector<cv::KeyPoint>& vKeys2, const std::vector<int> &vMatches12,
                                         cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated) = 0;

    // Calibration matrix K of the model.
    virtual cv::Mat toK() = 0;
    virtual cv::Matx33f toK_() = 0;

    virtual bool epipolarConstrain(GeometricCamera* otherCamera, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc) = 0;
    virtual bool epipolarConstrain_(GeometricCamera* otherCamera, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc) = 0;

    // Unchecked access to the intrinsic parameter vector (layout is
    // defined by each concrete model).
    float getParameter(const int i){return mvParameters[i];}
    void setParameter(const float p, const size_t i){mvParameters[i] = p;}
    size_t size(){return mvParameters.size();}

    virtual bool matchAndtriangulate(const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, GeometricCamera* pOther,
                                     cv::Mat& Tcw1, cv::Mat& Tcw2,
                                     const float sigmaLevel1, const float sigmaLevel2,
                                     cv::Mat& x3Dtriangulated) = 0;

    unsigned int GetId() { return mnId; }
    unsigned int GetType() { return mnType; } // CAM_PINHOLE or CAM_FISHEYE

    const unsigned int CAM_PINHOLE = 0;
    const unsigned int CAM_FISHEYE = 1;

    static long unsigned int nNextId; // next camera id, shared by all models

protected:
    std::vector<float> mvParameters; // intrinsic parameters
    unsigned int mnId;               // unique camera id (assigned from nNextId)
    unsigned int mnType;             // CAM_PINHOLE or CAM_FISHEYE
};
}
#endif //CAMERAMODELS_GEOMETRICCAMERA_H

View File

@@ -0,0 +1,116 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef CAMERAMODELS_KANNALABRANDT8_H
#define CAMERAMODELS_KANNALABRANDT8_H
#include <assert.h>
#include <vector>
#include <opencv2/core/core.hpp>
#include <boost/serialization/serialization.hpp>
#include <boost/serialization/base_object.hpp>
#include <boost/serialization/vector.hpp>
#include "GeometricCamera.h"
#include "TwoViewReconstruction.h"
namespace ORB_SLAM3 {
// Kannala-Brandt fisheye camera model with 8 intrinsic parameters
// [fx, fy, cx, cy, k0, k1, k2, k3]; registered as CAM_FISHEYE.
// Member-initializer lists follow declaration order
// (mvLappingArea, precision, tvr) to avoid -Wreorder warnings.
class KannalaBrandt8 final : public GeometricCamera {
public:
    // FIX: tvr(nullptr) added — this constructor previously left the
    // TwoViewReconstruction pointer uninitialized.
    KannalaBrandt8() : precision(1e-6), tvr(nullptr) {
        mvParameters.resize(8);
        mnId=nNextId++;
        mnType = CAM_FISHEYE;
    }
    KannalaBrandt8(const std::vector<float> _vParameters) : GeometricCamera(_vParameters), mvLappingArea(2,0), precision(1e-6), tvr(nullptr) {
        assert(mvParameters.size() == 8);
        mnId=nNextId++;
        mnType = CAM_FISHEYE;
    }
    // FIX: tvr(nullptr) added — this constructor previously left tvr
    // uninitialized (only some constructors set it).
    KannalaBrandt8(const std::vector<float> _vParameters, const float _precision) : GeometricCamera(_vParameters),
            mvLappingArea(2,0), precision(_precision), tvr(nullptr) {
        assert(mvParameters.size() == 8);
        mnId=nNextId++;
        mnType = CAM_FISHEYE;
    }
    // Copy-like constructor (takes a pointer, mirroring Pinhole's).
    KannalaBrandt8(KannalaBrandt8* pKannala) : GeometricCamera(pKannala->mvParameters), mvLappingArea(2,0), precision(pKannala->precision), tvr(nullptr) {
        assert(mvParameters.size() == 8);
        mnId=nNextId++;
        mnType = CAM_FISHEYE;
    }
    // FIX: destructor added to release tvr, matching Pinhole::~Pinhole();
    // previously the TwoViewReconstruction object leaked.
    ~KannalaBrandt8() {
        if(tvr) delete tvr;
    }

    // GeometricCamera interface (definitions in KannalaBrandt8.cpp).
    cv::Point2f project(const cv::Point3f &p3D) override;
    cv::Point2f project(const cv::Matx31f &m3D) override;
    cv::Point2f project(const cv::Mat& m3D) override;
    Eigen::Vector2d project(const Eigen::Vector3d & v3D) override;
    cv::Mat projectMat(const cv::Point3f& p3D) override;
    float uncertainty2(const Eigen::Matrix<double,2,1> &p2D) override;
    cv::Point3f unproject(const cv::Point2f &p2D) override;
    cv::Mat unprojectMat(const cv::Point2f &p2D) override;
    cv::Matx31f unprojectMat_(const cv::Point2f &p2D) override;
    cv::Mat projectJac(const cv::Point3f &p3D) override;
    Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D) override;
    cv::Mat unprojectJac(const cv::Point2f &p2D) override;
    bool ReconstructWithTwoViews(const std::vector<cv::KeyPoint>& vKeys1, const std::vector<cv::KeyPoint>& vKeys2, const std::vector<int> &vMatches12,
                                 cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated) override;
    cv::Mat toK() override;
    cv::Matx33f toK_() override;
    bool epipolarConstrain(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc) override;
    bool epipolarConstrain_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc) override;

    // Fisheye-specific triangulation helpers (not part of the base interface).
    float TriangulateMatches(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc, cv::Mat& p3D);
    float TriangulateMatches_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc, cv::Matx31f& p3D);

    std::vector<int> mvLappingArea; // two entries, initialized to 0

    bool matchAndtriangulate(const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, GeometricCamera* pOther,
                             cv::Mat& Tcw1, cv::Mat& Tcw2,
                             const float sigmaLevel1, const float sigmaLevel2,
                             cv::Mat& x3Dtriangulated) override;

    friend std::ostream& operator<<(std::ostream& os, const KannalaBrandt8& kb);
    friend std::istream& operator>>(std::istream& is, KannalaBrandt8& kb);

private:
    const float precision; // numeric tolerance used by the model (default 1e-6)
    //Parameters vector corresponds to
    //[fx, fy, cx, cy, k0, k1, k2, k3]
    TwoViewReconstruction* tvr; // owned; created lazily, freed in the destructor

    void Triangulate(const cv::Point2f &p1, const cv::Point2f &p2, const cv::Mat &Tcw1, const cv::Mat &Tcw2,cv::Mat &x3D);
    void Triangulate_(const cv::Point2f &p1, const cv::Point2f &p2, const cv::Matx44f &Tcw1, const cv::Matx44f &Tcw2,cv::Matx31f &x3D);
};
}
//BOOST_CLASS_EXPORT_KEY(ORBSLAM2::KannalaBrandt8)
#endif //CAMERAMODELS_KANNALABRANDT8_H

View File

@@ -0,0 +1,108 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef CAMERAMODELS_PINHOLE_H
#define CAMERAMODELS_PINHOLE_H
#include <assert.h>
#include <vector>
#include <opencv2/core/core.hpp>
#include <boost/serialization/serialization.hpp>
#include <boost/serialization/base_object.hpp>
#include <boost/serialization/vector.hpp>
#include <boost/serialization/assume_abstract.hpp>
#include "GeometricCamera.h"
#include "TwoViewReconstruction.h"
namespace ORB_SLAM3 {
// Pinhole (perspective) camera model with 4 intrinsic parameters
// [fx, fy, cx, cy]; registered as CAM_PINHOLE.
class Pinhole : public GeometricCamera {
public:
    // FIX: tvr(nullptr) added. The default constructor left tvr
    // uninitialized while the destructor deletes it, so destroying a
    // default-constructed Pinhole freed an indeterminate pointer (UB).
    Pinhole() : tvr(nullptr) {
        mvParameters.resize(4);
        mnId=nNextId++;
        mnType = CAM_PINHOLE;
    }
    Pinhole(const std::vector<float> _vParameters) : GeometricCamera(_vParameters), tvr(nullptr) {
        assert(mvParameters.size() == 4);
        mnId=nNextId++;
        mnType = CAM_PINHOLE;
    }
    // Copy-like constructor (takes a pointer, mirroring KannalaBrandt8's).
    Pinhole(Pinhole* pPinhole) : GeometricCamera(pPinhole->mvParameters), tvr(nullptr) {
        assert(mvParameters.size() == 4);
        mnId=nNextId++;
        mnType = CAM_PINHOLE;
    }
    // Releases the lazily created two-view reconstructor.
    ~Pinhole(){
        if(tvr) delete tvr;
    }

    // GeometricCamera interface (definitions in Pinhole.cpp).
    cv::Point2f project(const cv::Point3f &p3D) override;
    cv::Point2f project(const cv::Matx31f &m3D) override;
    cv::Point2f project(const cv::Mat &m3D) override;
    Eigen::Vector2d project(const Eigen::Vector3d & v3D) override;
    cv::Mat projectMat(const cv::Point3f& p3D) override;
    float uncertainty2(const Eigen::Matrix<double,2,1> &p2D) override;
    cv::Point3f unproject(const cv::Point2f &p2D) override;
    cv::Mat unprojectMat(const cv::Point2f &p2D) override;
    cv::Matx31f unprojectMat_(const cv::Point2f &p2D) override;
    cv::Mat projectJac(const cv::Point3f &p3D) override;
    Eigen::Matrix<double,2,3> projectJac(const Eigen::Vector3d& v3D) override;
    cv::Mat unprojectJac(const cv::Point2f &p2D) override;
    bool ReconstructWithTwoViews(const std::vector<cv::KeyPoint>& vKeys1, const std::vector<cv::KeyPoint>& vKeys2, const std::vector<int> &vMatches12,
                                 cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated) override;
    cv::Mat toK() override;
    cv::Matx33f toK_() override;
    bool epipolarConstrain(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Mat& R12, const cv::Mat& t12, const float sigmaLevel, const float unc) override;
    bool epipolarConstrain_(GeometricCamera* pCamera2, const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, const cv::Matx33f& R12, const cv::Matx31f& t12, const float sigmaLevel, const float unc) override;

    // Not implemented for the pinhole model: always reports failure.
    bool matchAndtriangulate(const cv::KeyPoint& kp1, const cv::KeyPoint& kp2, GeometricCamera* pOther,
                             cv::Mat& Tcw1, cv::Mat& Tcw2,
                             const float sigmaLevel1, const float sigmaLevel2,
                             cv::Mat& x3Dtriangulated) override { return false;}

    friend std::ostream& operator<<(std::ostream& os, const Pinhole& ph);
    friend std::istream& operator>>(std::istream& os, Pinhole& ph);

private:
    cv::Mat SkewSymmetricMatrix(const cv::Mat &v);
    cv::Matx33f SkewSymmetricMatrix_(const cv::Matx31f &v);

    //Parameters vector corresponds to
    // [fx, fy, cx, cy]
    TwoViewReconstruction* tvr; // owned; created lazily, freed in the destructor
};
}
//BOOST_CLASS_EXPORT_KEY(ORBSLAM2::Pinhole)
#endif //CAMERAMODELS_PINHOLE_H

6
include/Config.h Normal file
View File

@@ -0,0 +1,6 @@
#ifndef CONFIG_H
#define CONFIG_H
//#define REGISTER_TIMES
#endif // CONFIG_H

63
include/Converter.h Normal file
View File

@@ -0,0 +1,63 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef CONVERTER_H
#define CONVERTER_H
#include<opencv2/core/core.hpp>
#include<Eigen/Dense>
#include"Thirdparty/g2o/g2o/types/types_six_dof_expmap.h"
#include"Thirdparty/g2o/g2o/types/types_seven_dof_expmap.h"
namespace ORB_SLAM3
{
// Converter: static helpers translating between the cv::Mat, Eigen and
// g2o (SE3Quat / Sim3) representations used throughout the system.
// All definitions live in Converter.cc.
class Converter
{
public:
// Split a descriptor matrix into one cv::Mat per row.
static std::vector<cv::Mat> toDescriptorVector(const cv::Mat &Descriptors);
// To g2o SE3 quaternion form.
static g2o::SE3Quat toSE3Quat(const cv::Mat &cvT);
static g2o::SE3Quat toSE3Quat(const g2o::Sim3 &gSim3);
// g2o / Eigen -> cv::Mat.
static cv::Mat toCvMat(const g2o::SE3Quat &SE3);
static cv::Mat toCvMat(const g2o::Sim3 &Sim3);
static cv::Mat toCvMat(const Eigen::Matrix<double,4,4> &m);
static cv::Mat toCvMat(const Eigen::Matrix3d &m);
static cv::Mat toCvMat(const Eigen::Matrix<double,3,1> &m);
static cv::Mat toCvMat(const Eigen::MatrixXd &m);
// Build an SE3 cv::Mat from rotation R and translation t.
static cv::Mat toCvSE3(const Eigen::Matrix<double,3,3> &R, const Eigen::Matrix<double,3,1> &t);
static cv::Mat tocvSkewMatrix(const cv::Mat &v);
// cv::Mat / cv::Point3f -> Eigen.
static Eigen::Matrix<double,3,1> toVector3d(const cv::Mat &cvVector);
static Eigen::Matrix<double,3,1> toVector3d(const cv::Point3f &cvPoint);
static Eigen::Matrix<double,3,3> toMatrix3d(const cv::Mat &cvMat3);
static Eigen::Matrix<double,4,4> toMatrix4d(const cv::Mat &cvMat4);
// Rotation matrix -> quaternion vector / Euler angles.
static std::vector<float> toQuaternion(const cv::Mat &M);
static bool isRotationMatrix(const cv::Mat &R);
static std::vector<float> toEuler(const cv::Mat &R);
};
}// namespace ORB_SLAM
#endif // CONVERTER_H

327
include/Frame.h Normal file
View File

@@ -0,0 +1,327 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef FRAME_H
#define FRAME_H
#include<vector>
#include "Thirdparty/DBoW2/DBoW2/BowVector.h"
#include "Thirdparty/DBoW2/DBoW2/FeatureVector.h"
#include "ImuTypes.h"
#include "ORBVocabulary.h"
#include "Config.h"
#include <mutex>
#include <opencv2/opencv.hpp>
namespace ORB_SLAM3
{
#define FRAME_GRID_ROWS 48
#define FRAME_GRID_COLS 64
class MapPoint;
class KeyFrame;
class ConstraintPoseImu;
class GeometricCamera;
class ORBextractor;
// Frame: one processed image (mono / stereo / RGB-D / stereo-fisheye)
// together with its extracted ORB features, camera pose, IMU state and
// the keypoint grid used to speed up matching. Most method definitions
// live in Frame.cc; only trivial accessors are inline here.
class Frame
{
public:
Frame();
// Copy constructor.
Frame(const Frame &frame);
// Constructor for stereo cameras.
Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeStamp, ORBextractor* extractorLeft, ORBextractor* extractorRight, ORBVocabulary* voc, cv::Mat &K, cv::Mat &distCoef, const float &bf, const float &thDepth, GeometricCamera* pCamera,Frame* pPrevF = static_cast<Frame*>(NULL), const IMU::Calib &ImuCalib = IMU::Calib());
// Constructor for RGB-D cameras.
Frame(const cv::Mat &imGray, const cv::Mat &imDepth, const double &timeStamp, ORBextractor* extractor,ORBVocabulary* voc, cv::Mat &K, cv::Mat &distCoef, const float &bf, const float &thDepth, GeometricCamera* pCamera,Frame* pPrevF = static_cast<Frame*>(NULL), const IMU::Calib &ImuCalib = IMU::Calib());
// Constructor for Monocular cameras.
Frame(const cv::Mat &imGray, const double &timeStamp, ORBextractor* extractor,ORBVocabulary* voc, GeometricCamera* pCamera, cv::Mat &distCoef, const float &bf, const float &thDepth, Frame* pPrevF = static_cast<Frame*>(NULL), const IMU::Calib &ImuCalib = IMU::Calib());
// Extract ORB on the image. 0 for left image and 1 for right image.
void ExtractORB(int flag, const cv::Mat &im, const int x0, const int x1);
// Compute Bag of Words representation.
void ComputeBoW();
// Set the camera pose. (Imu pose is not modified!)
void SetPose(cv::Mat Tcw);
void GetPose(cv::Mat &Tcw);
// Set IMU velocity
void SetVelocity(const cv::Mat &Vwb);
// Set IMU pose and velocity (implicitly changes camera pose)
void SetImuPoseVelocity(const cv::Mat &Rwb, const cv::Mat &twb, const cv::Mat &Vwb);
// Computes rotation, translation and camera center matrices from the camera pose.
void UpdatePoseMatrices();
// Returns the camera center.
inline cv::Mat GetCameraCenter(){
return mOw.clone();
}
// Returns inverse of rotation
inline cv::Mat GetRotationInverse(){
return mRwc.clone();
}
cv::Mat GetImuPosition();
cv::Mat GetImuRotation();
cv::Mat GetImuPose();
void SetNewBias(const IMU::Bias &b);
// Check if a MapPoint is in the frustum of the camera
// and fill variables of the MapPoint to be used by the tracking
bool isInFrustum(MapPoint* pMP, float viewingCosLimit);
bool ProjectPointDistort(MapPoint* pMP, cv::Point2f &kp, float &u, float &v);
cv::Mat inRefCoordinates(cv::Mat pCw);
// Compute the cell of a keypoint (return false if outside the grid)
bool PosInGrid(const cv::KeyPoint &kp, int &posX, int &posY);
vector<size_t> GetFeaturesInArea(const float &x, const float &y, const float &r, const int minLevel=-1, const int maxLevel=-1, const bool bRight = false) const;
// Search a match for each keypoint in the left image to a keypoint in the right image.
// If there is a match, depth is computed and the right coordinate associated to the left keypoint is stored.
void ComputeStereoMatches();
// Associate a "right" coordinate to a keypoint if there is valid depth in the depthmap.
void ComputeStereoFromRGBD(const cv::Mat &imDepth);
// Backprojects a keypoint (if stereo/depth info available) into 3D world coordinates.
cv::Mat UnprojectStereo(const int &i);
// NOTE(review): presumably an IMU pose prior used by the optimizer;
// set and consumed outside this class — confirm in Optimizer.cc.
ConstraintPoseImu* mpcpi;
bool imuIsPreintegrated();
void setIntegrated();
// Inverse rotation and camera center, returned by the inline getters above.
cv::Mat mRwc;
cv::Mat mOw;
public:
// Vocabulary used for relocalization.
ORBVocabulary* mpORBvocabulary;
// Feature extractor. The right is used only in the stereo case.
ORBextractor* mpORBextractorLeft, *mpORBextractorRight;
// Frame timestamp.
double mTimeStamp;
// Calibration matrix and OpenCV distortion parameters.
cv::Mat mK;
static float fx;
static float fy;
static float cx;
static float cy;
static float invfx;
static float invfy;
cv::Mat mDistCoef;
// Stereo baseline multiplied by fx.
float mbf;
// Stereo baseline in meters.
float mb;
// Threshold close/far points. Close points are inserted from 1 view.
// Far points are inserted as in the monocular case from 2 views.
float mThDepth;
// Number of KeyPoints.
int N;
// Vector of keypoints (original for visualization) and undistorted (actually used by the system).
// In the stereo case, mvKeysUn is redundant as images must be rectified.
// In the RGB-D case, RGB images can be distorted.
std::vector<cv::KeyPoint> mvKeys, mvKeysRight;
std::vector<cv::KeyPoint> mvKeysUn;
// Corresponding stereo coordinate and depth for each keypoint.
std::vector<MapPoint*> mvpMapPoints;
// "Monocular" keypoints have a negative value.
std::vector<float> mvuRight;
std::vector<float> mvDepth;
// Bag of Words Vector structures.
DBoW2::BowVector mBowVec;
DBoW2::FeatureVector mFeatVec;
// ORB descriptor, each row associated to a keypoint.
cv::Mat mDescriptors, mDescriptorsRight;
// MapPoints associated to keypoints, NULL pointer if no association.
// Flag to identify outlier associations.
std::vector<bool> mvbOutlier;
int mnCloseMPs;
// Keypoints are assigned to cells in a grid to reduce matching complexity when projecting MapPoints.
static float mfGridElementWidthInv;
static float mfGridElementHeightInv;
std::vector<std::size_t> mGrid[FRAME_GRID_COLS][FRAME_GRID_ROWS];
// Camera pose.
cv::Mat mTcw;
// IMU linear velocity
cv::Mat mVw;
// Predicted IMU state (rotation, translation, velocity, bias).
cv::Mat mPredRwb, mPredtwb, mPredVwb;
IMU::Bias mPredBias;
// IMU bias
IMU::Bias mImuBias;
// Imu calibration
IMU::Calib mImuCalib;
// Imu preintegration from last keyframe
IMU::Preintegrated* mpImuPreintegrated;
KeyFrame* mpLastKeyFrame;
// Pointer to previous frame
Frame* mpPrevFrame;
IMU::Preintegrated* mpImuPreintegratedFrame;
// Current and Next Frame id.
static long unsigned int nNextId;
long unsigned int mnId;
// Reference Keyframe.
KeyFrame* mpReferenceKF;
// Scale pyramid info.
int mnScaleLevels;
float mfScaleFactor;
float mfLogScaleFactor;
vector<float> mvScaleFactors;
vector<float> mvInvScaleFactors;
vector<float> mvLevelSigma2;
vector<float> mvInvLevelSigma2;
// Undistorted Image Bounds (computed once).
static float mnMinX;
static float mnMaxX;
static float mnMinY;
static float mnMaxY;
static bool mbInitialComputations;
map<long unsigned int, cv::Point2f> mmProjectPoints;
map<long unsigned int, cv::Point2f> mmMatchedInImage;
string mNameFile;
int mnDataset;
#ifdef REGISTER_TIMES
double mTimeORB_Ext;
double mTimeStereoMatch;
#endif
private:
// Undistort keypoints given OpenCV distortion parameters.
// Only for the RGB-D case. Stereo must be already rectified!
// (called in the constructor).
void UndistortKeyPoints();
// Computes image bounds for the undistorted image (called in the constructor).
void ComputeImageBounds(const cv::Mat &imLeft);
// Assign keypoints to the grid for speed up feature matching (called in the constructor).
void AssignFeaturesToGrid();
// Rotation, translation and camera center
cv::Mat mRcw;
cv::Mat mtcw;
//==mtwc
// cv::Matx mirrors of the pose pieces above.
cv::Matx31f mOwx;
cv::Matx33f mRcwx;
cv::Matx31f mtcwx;
bool mbImuPreintegrated;
std::mutex *mpMutexImu;
public:
GeometricCamera* mpCamera, *mpCamera2;
//Number of KeyPoints extracted in the left and right images
int Nleft, Nright;
//Number of Non Lapping Keypoints
int monoLeft, monoRight;
//For stereo matching
std::vector<int> mvLeftToRightMatch, mvRightToLeftMatch;
//For stereo fisheye matching
static cv::BFMatcher BFmatcher;
//Triangulated stereo observations using as reference the left camera. These are
//computed during ComputeStereoFishEyeMatches
std::vector<cv::Mat> mvStereo3Dpoints;
//Grid for the right image
std::vector<std::size_t> mGridRight[FRAME_GRID_COLS][FRAME_GRID_ROWS];
// Left-right extrinsics for the stereo-fisheye rig.
cv::Mat mTlr, mRlr, mtlr, mTrl;
cv::Matx34f mTrlx, mTlrx;
// Constructor for two-camera (stereo-fisheye) rigs.
Frame(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timeStamp, ORBextractor* extractorLeft, ORBextractor* extractorRight, ORBVocabulary* voc, cv::Mat &K, cv::Mat &distCoef, const float &bf, const float &thDepth, GeometricCamera* pCamera, GeometricCamera* pCamera2, cv::Mat& Tlr,Frame* pPrevF = static_cast<Frame*>(NULL), const IMU::Calib &ImuCalib = IMU::Calib());
//Stereo fisheye
void ComputeStereoFishEyeMatches();
bool isInFrustumChecks(MapPoint* pMP, float viewingCosLimit, bool bRight = false);
cv::Mat UnprojectStereoFishEye(const int &i);
cv::Mat imgLeft, imgRight;
// Debug helper: counts tracked (non-outlier) map points associated to the
// left/right images and prints the totals to stdout.
void PrintPointDistribution(){
int left = 0, right = 0;
int Nlim = (Nleft != -1) ? Nleft : N;
for(int i = 0; i < N; i++){
if(mvpMapPoints[i] && !mvbOutlier[i]){
if(i < Nlim) left++;
else right++;
}
}
cout << "Point distribution in Frame: left-> " << left << " --- right-> " << right << endl;
}
};
}// namespace ORB_SLAM
#endif // FRAME_H

88
include/FrameDrawer.h Normal file
View File

@@ -0,0 +1,88 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef FRAMEDRAWER_H
#define FRAMEDRAWER_H
#include "Tracking.h"
#include "MapPoint.h"
#include "Atlas.h"
#include<opencv2/core/core.hpp>
#include<opencv2/features2d/features2d.hpp>
#include<mutex>
#include <unordered_set>
namespace ORB_SLAM3
{
class Tracking;
class Viewer;
// FrameDrawer: renders the visualization image of the last processed
// frame (keypoints, matches, tracking state text) for the viewer.
// Definitions live in FrameDrawer.cc.
class FrameDrawer
{
public:
FrameDrawer(Atlas* pAtlas);
// Update info from the last processed frame.
void Update(Tracking *pTracker);
// Draw last processed frame.
cv::Mat DrawFrame(bool bOldFeatures=true);
cv::Mat DrawRightFrame();
bool both; // NOTE(review): presumably set when both stereo images are drawn — confirm in FrameDrawer.cc
protected:
void DrawTextInfo(cv::Mat &im, int nState, cv::Mat &imText);
// Info of the frame to be drawn
cv::Mat mIm, mImRight;
int N; // number of keypoints in the current frame
vector<cv::KeyPoint> mvCurrentKeys,mvCurrentKeysRight;
vector<bool> mvbMap, mvbVO; // NOTE(review): presumably per-keypoint flags (map-matched / VO-only) — confirm
bool mbOnlyTracking;
int mnTracked, mnTrackedVO;
vector<cv::KeyPoint> mvIniKeys;
vector<int> mvIniMatches;
int mState; // tracking state passed to DrawTextInfo
Atlas* mpAtlas;
std::mutex mMutex; // NOTE(review): presumably guards the members above between Update and Draw* — confirm
vector<pair<cv::Point2f, cv::Point2f> > mvTracks;
Frame mCurrentFrame;
vector<MapPoint*> mvpLocalMap;
vector<cv::KeyPoint> mvMatchedKeys;
vector<MapPoint*> mvpMatchedMPs;
vector<cv::KeyPoint> mvOutlierKeys;
vector<MapPoint*> mvpOutlierMPs;
map<long unsigned int, cv::Point2f> mmProjectPoints;
map<long unsigned int, cv::Point2f> mmMatchedInImage;
};
} //namespace ORB_SLAM
#endif // FRAMEDRAWER_H

863
include/G2oTypes.h Normal file
View File

@@ -0,0 +1,863 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef G2OTYPES_H
#define G2OTYPES_H
#include "Thirdparty/g2o/g2o/core/base_vertex.h"
#include "Thirdparty/g2o/g2o/core/base_binary_edge.h"
#include "Thirdparty/g2o/g2o/types/types_sba.h"
#include "Thirdparty/g2o/g2o/core/base_multi_edge.h"
#include "Thirdparty/g2o/g2o/core/base_unary_edge.h"
#include<opencv2/core/core.hpp>
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <Eigen/Dense>
#include <Frame.h>
#include <KeyFrame.h>
#include"Converter.h"
#include <math.h>
namespace ORB_SLAM3
{
class KeyFrame;
class Frame;
class GeometricCamera;
typedef Eigen::Matrix<double, 6, 1> Vector6d;
typedef Eigen::Matrix<double, 9, 1> Vector9d;
typedef Eigen::Matrix<double, 12, 1> Vector12d;
typedef Eigen::Matrix<double, 15, 1> Vector15d;
typedef Eigen::Matrix<double, 12, 12> Matrix12d;
typedef Eigen::Matrix<double, 15, 15> Matrix15d;
typedef Eigen::Matrix<double, 9, 9> Matrix9d;
Eigen::Matrix3d ExpSO3(const double x, const double y, const double z);
Eigen::Matrix3d ExpSO3(const Eigen::Vector3d &w);
Eigen::Vector3d LogSO3(const Eigen::Matrix3d &R);
Eigen::Matrix3d InverseRightJacobianSO3(const Eigen::Vector3d &v);
Eigen::Matrix3d RightJacobianSO3(const Eigen::Vector3d &v);
Eigen::Matrix3d RightJacobianSO3(const double x, const double y, const double z);
Eigen::Matrix3d Skew(const Eigen::Vector3d &w);
Eigen::Matrix3d InverseRightJacobianSO3(const double x, const double y, const double z);
Eigen::Matrix3d NormalizeRotation(const Eigen::Matrix3d &R);
// Joint state of the IMU body pose (Rwb, twb) and the poses of the rigidly
// attached camera(s). Stored as the estimate of the pose vertices below so
// that an update applied to the body pose keeps all camera poses consistent.
class ImuCamPose
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
ImuCamPose(){}
ImuCamPose(KeyFrame* pKF);
ImuCamPose(Frame* pF);
ImuCamPose(Eigen::Matrix3d &_Rwc, Eigen::Vector3d &_twc, KeyFrame* pKF);
// Sets all per-camera world->camera poses, body->camera extrinsics and bf.
void SetParam(const std::vector<Eigen::Matrix3d> &_Rcw, const std::vector<Eigen::Vector3d> &_tcw, const std::vector<Eigen::Matrix3d> &_Rbc,
const std::vector<Eigen::Vector3d> &_tbc, const double &_bf);
void Update(const double *pu); // update in the imu reference
void UpdateW(const double *pu); // update in the world reference
Eigen::Vector2d Project(const Eigen::Vector3d &Xw, int cam_idx=0) const; // Mono
Eigen::Vector3d ProjectStereo(const Eigen::Vector3d &Xw, int cam_idx=0) const; // Stereo
bool isDepthPositive(const Eigen::Vector3d &Xw, int cam_idx=0) const;
public:
// For IMU
Eigen::Matrix3d Rwb;
Eigen::Vector3d twb;
// For set of cameras
std::vector<Eigen::Matrix3d> Rcw;
std::vector<Eigen::Vector3d> tcw;
std::vector<Eigen::Matrix3d> Rcb, Rbc;
std::vector<Eigen::Vector3d> tcb, tbc;
double bf; // presumably stereo baseline times focal length — confirm against Frame
std::vector<GeometricCamera*> pCamera; // non-owning camera models, one per camera
// For posegraph 4DoF
Eigen::Matrix3d Rwb0; // reference body rotation used by the 4DoF pose graph
Eigen::Matrix3d DR;
int its; // update counter (purpose defined in the .cc file; likely periodic renormalization — confirm)
};
// Inverse-depth parameterization of a map point: a single optimizable value
// rho anchored at pixel (u,v) of a host keyframe, whose intrinsics are
// copied in at construction.
class InvDepthPoint
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
InvDepthPoint(){}
InvDepthPoint(double _rho, double _u, double _v, KeyFrame* pHostKF);
void Update(const double *pu); // applies the optimizer increment (defined out of line)
double rho; // inverse depth — the only optimizable parameter
double u, v; // they are not variables, observation in the host frame
double fx, fy, cx, cy, bf; // from host frame
int its; // update counter (mirrors ImuCamPose::its; semantics defined in the .cc file)
};
// g2o vertex for the full 6-DoF IMU body pose. The estimate is an ImuCamPose;
// the 6-dim perturbation is applied in the IMU/body frame (ImuCamPose::Update).
class VertexPose : public g2o::BaseVertex<6,ImuCamPose>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
VertexPose(){}
VertexPose(KeyFrame* pKF){
setEstimate(ImuCamPose(pKF));
}
VertexPose(Frame* pF){
setEstimate(ImuCamPose(pF));
}
virtual bool read(std::istream& is);
virtual bool write(std::ostream& os) const;
// A pose has no meaningful origin; intentionally left empty.
virtual void setToOriginImpl() {
}
virtual void oplusImpl(const double* update_){
_estimate.Update(update_);
updateCache();
}
};
// g2o vertex for the 4-DoF pose-graph optimization: only yaw (1) and
// translation (3) are optimized. The 4-dim increment is expanded to a 6-dim
// one with zero roll/pitch and applied in the world frame (UpdateW).
class VertexPose4DoF : public g2o::BaseVertex<4,ImuCamPose>
{
// Translation and yaw are the only optimizable variables
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
VertexPose4DoF(){}
VertexPose4DoF(KeyFrame* pKF){
setEstimate(ImuCamPose(pKF));
}
VertexPose4DoF(Frame* pF){
setEstimate(ImuCamPose(pF));
}
VertexPose4DoF(Eigen::Matrix3d &_Rwc, Eigen::Vector3d &_twc, KeyFrame* pKF){
setEstimate(ImuCamPose(_Rwc, _twc, pKF));
}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
virtual void setToOriginImpl() {
}
virtual void oplusImpl(const double* update_){
// Expand [yaw, tx, ty, tz] into [rx=0, ry=0, rz, tx, ty, tz] so that
// roll and pitch stay fixed, then update in the world reference.
double update6DoF[6];
update6DoF[0] = 0;
update6DoF[1] = 0;
update6DoF[2] = update_[0];
update6DoF[3] = update_[1];
update6DoF[4] = update_[2];
update6DoF[5] = update_[3];
_estimate.UpdateW(update6DoF);
updateCache();
}
};
// g2o vertex holding a 3D body velocity. The local parameterization is a
// plain additive update on the Eigen::Vector3d estimate.
class VertexVelocity : public g2o::BaseVertex<3,Eigen::Vector3d>
{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    VertexVelocity(){}
    VertexVelocity(KeyFrame* pKF);
    VertexVelocity(Frame* pF);

    virtual bool read(std::istream& is){return false;}
    virtual bool write(std::ostream& os) const{return false;}

    // A velocity has no natural origin; reset is intentionally a no-op.
    virtual void setToOriginImpl() {
    }

    // v <- v + delta
    virtual void oplusImpl(const double* update_){
        const Eigen::Vector3d delta(update_[0], update_[1], update_[2]);
        setEstimate(estimate() + delta);
    }
};
// g2o vertex holding the gyroscope bias (3D), updated additively.
class VertexGyroBias : public g2o::BaseVertex<3,Eigen::Vector3d>
{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    VertexGyroBias(){}
    VertexGyroBias(KeyFrame* pKF);
    VertexGyroBias(Frame* pF);

    virtual bool read(std::istream& is){return false;}
    virtual bool write(std::ostream& os) const{return false;}

    // A bias estimate has no natural origin; reset is intentionally a no-op.
    virtual void setToOriginImpl() {
    }

    // bg <- bg + delta
    virtual void oplusImpl(const double* update_){
        const Eigen::Vector3d delta(update_[0], update_[1], update_[2]);
        setEstimate(estimate() + delta);
    }
};
// g2o vertex holding the accelerometer bias (3D), updated additively.
class VertexAccBias : public g2o::BaseVertex<3,Eigen::Vector3d>
{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    VertexAccBias(){}
    VertexAccBias(KeyFrame* pKF);
    VertexAccBias(Frame* pF);

    virtual bool read(std::istream& is){return false;}
    virtual bool write(std::ostream& os) const{return false;}

    // A bias estimate has no natural origin; reset is intentionally a no-op.
    virtual void setToOriginImpl() {
    }

    // ba <- ba + delta
    virtual void oplusImpl(const double* update_){
        const Eigen::Vector3d delta(update_[0], update_[1], update_[2]);
        setEstimate(estimate() + delta);
    }
};
// Gravity direction, parameterized as a rotation Rwg with a 2-DoF update:
// small rotations about the x and y axes only (rotation about the gravity
// axis itself is unobservable, so its component is fixed to zero).
class GDirection
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
GDirection(){}
GDirection(Eigen::Matrix3d pRwg): Rwg(pRwg){}
void Update(const double *pu)
{
// Right-multiply by a small rotation with zero z component.
Rwg=Rwg*ExpSO3(pu[0],pu[1],0.0);
}
Eigen::Matrix3d Rwg, Rgw; // NOTE(review): Rgw is never written here — confirm it is initialized/used elsewhere
int its; // update counter (semantics not visible in this header)
};
// g2o vertex wrapping GDirection: a 2-DoF gravity-direction estimate.
class VertexGDir : public g2o::BaseVertex<2,GDirection>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
VertexGDir(){}
VertexGDir(Eigen::Matrix3d pRwg){
setEstimate(GDirection(pRwg));
}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
// No natural origin for a gravity direction; intentionally empty.
virtual void setToOriginImpl() {
}
virtual void oplusImpl(const double* update_){
_estimate.Update(update_);
updateCache();
}
};
// Scalar map-scale vertex. Updates are multiplicative through the
// exponential, s <- s * exp(delta), which keeps the scale strictly positive.
class VertexScale : public g2o::BaseVertex<1,double>
{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    VertexScale(){
        setEstimate(1.0);
    }
    VertexScale(double ps){
        setEstimate(ps);
    }

    virtual bool read(std::istream& is){return false;}
    virtual bool write(std::ostream& os) const{return false;}

    // Unit scale is the natural origin.
    virtual void setToOriginImpl(){
        setEstimate(1.0);
    }

    virtual void oplusImpl(const double *update_){
        const double s = estimate();
        setEstimate(s * exp(update_[0]));
    }
};
// Inverse depth point (just one parameter, inverse depth at the host frame)
class VertexInvDepth : public g2o::BaseVertex<1,InvDepthPoint>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
VertexInvDepth(){}
VertexInvDepth(double invDepth, double u, double v, KeyFrame* pHostKF){
setEstimate(InvDepthPoint(invDepth, u, v, pHostKF));
}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
// No natural origin for an inverse-depth point; intentionally empty.
virtual void setToOriginImpl() {
}
virtual void oplusImpl(const double* update_){
_estimate.Update(update_);
updateCache();
}
};
// Monocular reprojection edge between a 3D point (VertexSBAPointXYZ) and a
// body pose (VertexPose). Residual: observed pixel minus projection of the
// point through camera cam_idx of the pose estimate.
class EdgeMono : public g2o::BaseBinaryEdge<2,Eigen::Vector2d,g2o::VertexSBAPointXYZ,VertexPose>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgeMono(int cam_idx_=0): cam_idx(cam_idx_){
}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const g2o::VertexSBAPointXYZ* VPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);
const VertexPose* VPose = static_cast<const VertexPose*>(_vertices[1]);
const Eigen::Vector2d obs(_measurement);
_error = obs - VPose->estimate().Project(VPoint->estimate(),cam_idx);
}
virtual void linearizeOplus();
// True if the point is in front of camera cam_idx of the current pose.
bool isDepthPositive()
{
const g2o::VertexSBAPointXYZ* VPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);
const VertexPose* VPose = static_cast<const VertexPose*>(_vertices[1]);
return VPose->estimate().isDepthPositive(VPoint->estimate(),cam_idx);
}
// Stacked Jacobian [d_err/d_point (2x3) | d_err/d_pose (2x6)].
Eigen::Matrix<double,2,9> GetJacobian(){
linearizeOplus();
Eigen::Matrix<double,2,9> J;
J.block<2,3>(0,0) = _jacobianOplusXi;
J.block<2,6>(0,3) = _jacobianOplusXj;
return J;
}
// Gauss-Newton Hessian block J^T * Information * J over [point | pose].
Eigen::Matrix<double,9,9> GetHessian(){
linearizeOplus();
Eigen::Matrix<double,2,9> J;
J.block<2,3>(0,0) = _jacobianOplusXi;
J.block<2,6>(0,3) = _jacobianOplusXj;
return J.transpose()*information()*J;
}
public:
const int cam_idx; // index into the ImuCamPose camera set
};
// Monocular reprojection edge with a fixed 3D point: only the pose is
// optimized (used for pose-only optimization, e.g. tracking).
class EdgeMonoOnlyPose : public g2o::BaseUnaryEdge<2,Eigen::Vector2d,VertexPose>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgeMonoOnlyPose(const cv::Mat &Xw_, int cam_idx_=0):Xw(Converter::toVector3d(Xw_)),
cam_idx(cam_idx_){}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const VertexPose* VPose = static_cast<const VertexPose*>(_vertices[0]);
const Eigen::Vector2d obs(_measurement);
_error = obs - VPose->estimate().Project(Xw,cam_idx);
}
virtual void linearizeOplus();
bool isDepthPositive()
{
const VertexPose* VPose = static_cast<const VertexPose*>(_vertices[0]);
return VPose->estimate().isDepthPositive(Xw,cam_idx);
}
// Pose-block Hessian J^T * Information * J (6x6).
Eigen::Matrix<double,6,6> GetHessian(){
linearizeOplus();
return _jacobianOplusXi.transpose()*information()*_jacobianOplusXi;
}
public:
const Eigen::Vector3d Xw; // fixed 3D point in world coordinates
const int cam_idx;
};
// Stereo reprojection edge between a 3D point and a body pose. The 3-dim
// measurement/residual comes from ImuCamPose::ProjectStereo.
class EdgeStereo : public g2o::BaseBinaryEdge<3,Eigen::Vector3d,g2o::VertexSBAPointXYZ,VertexPose>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgeStereo(int cam_idx_=0): cam_idx(cam_idx_){}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const g2o::VertexSBAPointXYZ* VPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);
const VertexPose* VPose = static_cast<const VertexPose*>(_vertices[1]);
const Eigen::Vector3d obs(_measurement);
_error = obs - VPose->estimate().ProjectStereo(VPoint->estimate(),cam_idx);
}
virtual void linearizeOplus();
// Stacked Jacobian [d_err/d_point (3x3) | d_err/d_pose (3x6)].
Eigen::Matrix<double,3,9> GetJacobian(){
linearizeOplus();
Eigen::Matrix<double,3,9> J;
J.block<3,3>(0,0) = _jacobianOplusXi;
J.block<3,6>(0,3) = _jacobianOplusXj;
return J;
}
// Gauss-Newton Hessian block J^T * Information * J over [point | pose].
Eigen::Matrix<double,9,9> GetHessian(){
linearizeOplus();
Eigen::Matrix<double,3,9> J;
J.block<3,3>(0,0) = _jacobianOplusXi;
J.block<3,6>(0,3) = _jacobianOplusXj;
return J.transpose()*information()*J;
}
public:
const int cam_idx; // index into the ImuCamPose camera set
};
// Stereo reprojection edge with a fixed 3D point: only the pose is optimized.
class EdgeStereoOnlyPose : public g2o::BaseUnaryEdge<3,Eigen::Vector3d,VertexPose>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgeStereoOnlyPose(const cv::Mat &Xw_, int cam_idx_=0):
Xw(Converter::toVector3d(Xw_)), cam_idx(cam_idx_){}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const VertexPose* VPose = static_cast<const VertexPose*>(_vertices[0]);
const Eigen::Vector3d obs(_measurement);
_error = obs - VPose->estimate().ProjectStereo(Xw, cam_idx);
}
virtual void linearizeOplus();
// Pose-block Hessian J^T * Information * J (6x6).
Eigen::Matrix<double,6,6> GetHessian(){
linearizeOplus();
return _jacobianOplusXi.transpose()*information()*_jacobianOplusXi;
}
public:
const Eigen::Vector3d Xw; // 3D point coordinates
const int cam_idx;
};
// Inertial edge with a 9-dim residual built from an IMU preintegration term.
// Based on the Jacobian block sizes below, the vertex order is presumably:
// [0] pose1 (6), [1] vel1 (3), [2] gyro bias (3), [3] acc bias (3),
// [4] pose2 (6), [5] vel2 (3) — confirm in the .cc file where edges are built.
class EdgeInertial : public g2o::BaseMultiEdge<9,Vector9d>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgeInertial(IMU::Preintegrated* pInt);
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError();
virtual void linearizeOplus();
// Full 24x24 Hessian over all six vertices.
Eigen::Matrix<double,24,24> GetHessian(){
linearizeOplus();
Eigen::Matrix<double,9,24> J;
J.block<9,6>(0,0) = _jacobianOplus[0];
J.block<9,3>(0,6) = _jacobianOplus[1];
J.block<9,3>(0,9) = _jacobianOplus[2];
J.block<9,3>(0,12) = _jacobianOplus[3];
J.block<9,6>(0,15) = _jacobianOplus[4];
J.block<9,3>(0,21) = _jacobianOplus[5];
return J.transpose()*information()*J;
}
// Hessian excluding the first pose vertex (blocks [1]..[5]).
Eigen::Matrix<double,18,18> GetHessianNoPose1(){
linearizeOplus();
Eigen::Matrix<double,9,18> J;
J.block<9,3>(0,0) = _jacobianOplus[1];
J.block<9,3>(0,3) = _jacobianOplus[2];
J.block<9,3>(0,6) = _jacobianOplus[3];
J.block<9,6>(0,9) = _jacobianOplus[4];
J.block<9,3>(0,15) = _jacobianOplus[5];
return J.transpose()*information()*J;
}
// Hessian over the second-frame vertices only (blocks [4] and [5]).
Eigen::Matrix<double,9,9> GetHessian2(){
linearizeOplus();
Eigen::Matrix<double,9,9> J;
J.block<9,6>(0,0) = _jacobianOplus[4];
J.block<9,3>(0,6) = _jacobianOplus[5];
return J.transpose()*information()*J;
}
// Preintegration Jacobians w.r.t. gyro (g) and accelerometer (a) bias.
const Eigen::Matrix3d JRg, JVg, JPg;
const Eigen::Matrix3d JVa, JPa;
IMU::Preintegrated* mpInt; // non-owning
const double dt; // total preintegration time
Eigen::Vector3d g; // gravity vector used by the residual
};
// Inertial edge where the gravity direction and the scale are included as
// optimizable variables (gravity is not assumed to point along -z).
// Eight connected vertices; from the Jacobian block sizes the order is
// presumably: pose1 (6), vel1 (3), gyro bias (3), acc bias (3), pose2 (6),
// vel2 (3), gravity direction (2), scale (1) — confirm in the .cc file.
class EdgeInertialGS : public g2o::BaseMultiEdge<9,Vector9d>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
// EdgeInertialGS(IMU::Preintegrated* pInt);
EdgeInertialGS(IMU::Preintegrated* pInt);
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError();
virtual void linearizeOplus();
// Preintegration Jacobians w.r.t. gyro (g) and accelerometer (a) bias.
const Eigen::Matrix3d JRg, JVg, JPg;
const Eigen::Matrix3d JVa, JPa;
IMU::Preintegrated* mpInt; // non-owning
const double dt; // total preintegration time
Eigen::Vector3d g, gI;
// Full 27x27 Hessian over all eight vertices, in vertex order.
Eigen::Matrix<double,27,27> GetHessian(){
linearizeOplus();
Eigen::Matrix<double,9,27> J;
J.block<9,6>(0,0) = _jacobianOplus[0];
J.block<9,3>(0,6) = _jacobianOplus[1];
J.block<9,3>(0,9) = _jacobianOplus[2];
J.block<9,3>(0,12) = _jacobianOplus[3];
J.block<9,6>(0,15) = _jacobianOplus[4];
J.block<9,3>(0,21) = _jacobianOplus[5];
J.block<9,2>(0,24) = _jacobianOplus[6];
J.block<9,1>(0,26) = _jacobianOplus[7];
return J.transpose()*information()*J;
}
// Same Hessian with the blocks permuted (biases/gravity/scale first).
Eigen::Matrix<double,27,27> GetHessian2(){
linearizeOplus();
Eigen::Matrix<double,9,27> J;
J.block<9,3>(0,0) = _jacobianOplus[2];
J.block<9,3>(0,3) = _jacobianOplus[3];
J.block<9,2>(0,6) = _jacobianOplus[6];
J.block<9,1>(0,8) = _jacobianOplus[7];
J.block<9,3>(0,9) = _jacobianOplus[1];
J.block<9,3>(0,12) = _jacobianOplus[5];
J.block<9,6>(0,15) = _jacobianOplus[0];
J.block<9,6>(0,21) = _jacobianOplus[4];
return J.transpose()*information()*J;
}
// Hessian restricted to [gyro bias, acc bias, gravity direction, scale].
Eigen::Matrix<double,9,9> GetHessian3(){
linearizeOplus();
Eigen::Matrix<double,9,9> J;
J.block<9,3>(0,0) = _jacobianOplus[2];
J.block<9,3>(0,3) = _jacobianOplus[3];
J.block<9,2>(0,6) = _jacobianOplus[6];
J.block<9,1>(0,8) = _jacobianOplus[7];
return J.transpose()*information()*J;
}
// Marginal Hessian of the scale variable alone.
Eigen::Matrix<double,1,1> GetHessianScale(){
linearizeOplus();
Eigen::Matrix<double,9,1> J = _jacobianOplus[7];
return J.transpose()*information()*J;
}
// Marginal Hessian of the gyro bias alone.
Eigen::Matrix<double,3,3> GetHessianBiasGyro(){
linearizeOplus();
Eigen::Matrix<double,9,3> J = _jacobianOplus[2];
return J.transpose()*information()*J;
}
// Marginal Hessian of the accelerometer bias alone.
Eigen::Matrix<double,3,3> GetHessianBiasAcc(){
linearizeOplus();
Eigen::Matrix<double,9,3> J = _jacobianOplus[3];
return J.transpose()*information()*J;
}
// Marginal Hessian of the gravity direction alone.
Eigen::Matrix<double,2,2> GetHessianGDir(){
linearizeOplus();
Eigen::Matrix<double,9,2> J = _jacobianOplus[6];
return J.transpose()*information()*J;
}
};
// Random-walk edge between consecutive gyro-bias vertices: the residual is
// the bias change b2 - b1, weighted by the edge information matrix.
class EdgeGyroRW : public g2o::BaseBinaryEdge<3,Eigen::Vector3d,VertexGyroBias,VertexGyroBias>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgeGyroRW(){}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const VertexGyroBias* VG1= static_cast<const VertexGyroBias*>(_vertices[0]);
const VertexGyroBias* VG2= static_cast<const VertexGyroBias*>(_vertices[1]);
_error = VG2->estimate()-VG1->estimate();
}
// Residual is linear in both biases: d_err/d_b1 = -I, d_err/d_b2 = I.
virtual void linearizeOplus(){
_jacobianOplusXi = -Eigen::Matrix3d::Identity();
_jacobianOplusXj.setIdentity();
}
// Hessian over [b1 | b2].
Eigen::Matrix<double,6,6> GetHessian(){
linearizeOplus();
Eigen::Matrix<double,3,6> J;
J.block<3,3>(0,0) = _jacobianOplusXi;
J.block<3,3>(0,3) = _jacobianOplusXj;
return J.transpose()*information()*J;
}
// Hessian of the second bias alone.
Eigen::Matrix3d GetHessian2(){
linearizeOplus();
return _jacobianOplusXj.transpose()*information()*_jacobianOplusXj;
}
};
// Random-walk edge between consecutive accelerometer-bias vertices; mirrors
// EdgeGyroRW: residual b2 - b1 with constant +/-Identity Jacobians.
class EdgeAccRW : public g2o::BaseBinaryEdge<3,Eigen::Vector3d,VertexAccBias,VertexAccBias>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgeAccRW(){}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const VertexAccBias* VA1= static_cast<const VertexAccBias*>(_vertices[0]);
const VertexAccBias* VA2= static_cast<const VertexAccBias*>(_vertices[1]);
_error = VA2->estimate()-VA1->estimate();
}
// Residual is linear in both biases: d_err/d_b1 = -I, d_err/d_b2 = I.
virtual void linearizeOplus(){
_jacobianOplusXi = -Eigen::Matrix3d::Identity();
_jacobianOplusXj.setIdentity();
}
// Hessian over [b1 | b2].
Eigen::Matrix<double,6,6> GetHessian(){
linearizeOplus();
Eigen::Matrix<double,3,6> J;
J.block<3,3>(0,0) = _jacobianOplusXi;
J.block<3,3>(0,3) = _jacobianOplusXj;
return J.transpose()*information()*J;
}
// Hessian of the second bias alone.
Eigen::Matrix3d GetHessian2(){
linearizeOplus();
return _jacobianOplusXj.transpose()*information()*_jacobianOplusXj;
}
};
// Prior on a full inertial state: pose (Rwb, twb), velocity vwb, gyro bias bg
// and accelerometer bias ba, with a 15x15 information matrix H. Both
// constructors condition H so it can safely be used as a quadratic cost:
// symmetrize it, then clamp negative/near-zero eigenvalues to zero so H is
// positive semi-definite.
class ConstraintPoseImu
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
ConstraintPoseImu(const Eigen::Matrix3d &Rwb_, const Eigen::Vector3d &twb_, const Eigen::Vector3d &vwb_,
const Eigen::Vector3d &bg_, const Eigen::Vector3d &ba_, const Matrix15d &H_):
Rwb(Rwb_), twb(twb_), vwb(vwb_), bg(bg_), ba(ba_), H(H_)
{
// Bug fix: symmetrize with the transpose. The previous (H+H)/2 was a
// no-op (it equals H); the eigen-clamping below assumes a symmetric H.
H = (H+H.transpose())/2;
// Project onto the PSD cone: zero-out tiny/negative eigenvalues.
Eigen::SelfAdjointEigenSolver<Eigen::Matrix<double,15,15> > es(H);
Eigen::Matrix<double,15,1> eigs = es.eigenvalues();
for(int i=0;i<15;i++)
if(eigs[i]<1e-12)
eigs[i]=0;
H = es.eigenvectors()*eigs.asDiagonal()*es.eigenvectors().transpose();
}
// cv::Mat overload: converts the OpenCV inputs (H_ stored as float) and
// applies the same conditioning.
ConstraintPoseImu(const cv::Mat &Rwb_, const cv::Mat &twb_, const cv::Mat &vwb_,
const IMU::Bias &b, const cv::Mat &H_)
{
Rwb = Converter::toMatrix3d(Rwb_);
twb = Converter::toVector3d(twb_);
vwb = Converter::toVector3d(vwb_);
bg << b.bwx, b.bwy, b.bwz;
ba << b.bax, b.bay, b.baz;
for(int i=0;i<15;i++)
for(int j=0;j<15;j++)
H(i,j)=H_.at<float>(i,j);
// Bug fix: symmetrize with the transpose (see comment above).
H = (H+H.transpose())/2;
Eigen::SelfAdjointEigenSolver<Eigen::Matrix<double,15,15> > es(H);
Eigen::Matrix<double,15,1> eigs = es.eigenvalues();
for(int i=0;i<15;i++)
if(eigs[i]<1e-12)
eigs[i]=0;
H = es.eigenvectors()*eigs.asDiagonal()*es.eigenvectors().transpose();
}
Eigen::Matrix3d Rwb;
Eigen::Vector3d twb;
Eigen::Vector3d vwb;
Eigen::Vector3d bg;
Eigen::Vector3d ba;
Matrix15d H; // conditioned (symmetric PSD) 15x15 information matrix
};
// Prior edge built from a ConstraintPoseImu: 15-dim residual over
// [pose (6), velocity (3), gyro bias (3), acc bias (3)] — vertex order
// presumed from the Jacobian block sizes; confirm in the .cc file.
class EdgePriorPoseImu : public g2o::BaseMultiEdge<15,Vector15d>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgePriorPoseImu(ConstraintPoseImu* c);
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError();
virtual void linearizeOplus();
// Full 15x15 Hessian over all four vertices.
Eigen::Matrix<double,15,15> GetHessian(){
linearizeOplus();
Eigen::Matrix<double,15,15> J;
J.block<15,6>(0,0) = _jacobianOplus[0];
J.block<15,3>(0,6) = _jacobianOplus[1];
J.block<15,3>(0,9) = _jacobianOplus[2];
J.block<15,3>(0,12) = _jacobianOplus[3];
return J.transpose()*information()*J;
}
// Hessian excluding the pose block (velocity and biases only).
Eigen::Matrix<double,9,9> GetHessianNoPose(){
linearizeOplus();
Eigen::Matrix<double,15,9> J;
J.block<15,3>(0,0) = _jacobianOplus[1];
J.block<15,3>(0,3) = _jacobianOplus[2];
J.block<15,3>(0,6) = _jacobianOplus[3];
return J.transpose()*information()*J;
}
// Prior values the residual is computed against.
Eigen::Matrix3d Rwb;
Eigen::Vector3d twb, vwb;
Eigen::Vector3d bg, ba;
};
// Priors for biases
// Unary prior on the accelerometer bias: residual bprior - ba.
class EdgePriorAcc : public g2o::BaseUnaryEdge<3,Eigen::Vector3d,VertexAccBias>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgePriorAcc(const cv::Mat &bprior_):bprior(Converter::toVector3d(bprior_)){}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const VertexAccBias* VA = static_cast<const VertexAccBias*>(_vertices[0]);
_error = bprior - VA->estimate();
}
virtual void linearizeOplus();
// Bias-block Hessian J^T * Information * J (3x3).
Eigen::Matrix<double,3,3> GetHessian(){
linearizeOplus();
return _jacobianOplusXi.transpose()*information()*_jacobianOplusXi;
}
const Eigen::Vector3d bprior; // prior accelerometer-bias value
};
// Unary prior on the gyroscope bias: residual bprior - bg.
class EdgePriorGyro : public g2o::BaseUnaryEdge<3,Eigen::Vector3d,VertexGyroBias>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
EdgePriorGyro(const cv::Mat &bprior_):bprior(Converter::toVector3d(bprior_)){}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const VertexGyroBias* VG = static_cast<const VertexGyroBias*>(_vertices[0]);
_error = bprior - VG->estimate();
}
virtual void linearizeOplus();
// Bias-block Hessian J^T * Information * J (3x3).
Eigen::Matrix<double,3,3> GetHessian(){
linearizeOplus();
return _jacobianOplusXi.transpose()*information()*_jacobianOplusXi;
}
const Eigen::Vector3d bprior; // prior gyroscope-bias value
};
// Relative-pose edge for the 4-DoF pose graph. The measurement is a relative
// transform dTij; the 6-dim residual stacks the SO(3) log of the rotation
// error on top of the translation error, using camera 0 of each vertex.
class Edge4DoF : public g2o::BaseBinaryEdge<6,Vector6d,VertexPose4DoF,VertexPose4DoF>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
Edge4DoF(const Eigen::Matrix4d &deltaT){
// Cache the rotation and translation parts of the measured transform.
dTij = deltaT;
dRij = deltaT.block<3,3>(0,0);
dtij = deltaT.block<3,1>(0,3);
}
virtual bool read(std::istream& is){return false;}
virtual bool write(std::ostream& os) const{return false;}
void computeError(){
const VertexPose4DoF* VPi = static_cast<const VertexPose4DoF*>(_vertices[0]);
const VertexPose4DoF* VPj = static_cast<const VertexPose4DoF*>(_vertices[1]);
_error << LogSO3(VPi->estimate().Rcw[0]*VPj->estimate().Rcw[0].transpose()*dRij.transpose()),
VPi->estimate().Rcw[0]*(-VPj->estimate().Rcw[0].transpose()*VPj->estimate().tcw[0])+VPi->estimate().tcw[0] - dtij;
}
// virtual void linearizeOplus(); // numerical implementation
Eigen::Matrix4d dTij; // measured relative transform
Eigen::Matrix3d dRij; // rotation part of dTij
Eigen::Vector3d dtij; // translation part of dTij
};
} //namespace ORB_SLAM3
#endif // G2OTYPES_H

285
include/ImuTypes.h Normal file
View File

@@ -0,0 +1,285 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef IMUTYPES_H
#define IMUTYPES_H
#include<vector>
#include<utility>
#include<opencv2/core/core.hpp>
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <Eigen/Dense>
#include <mutex>
#include <boost/serialization/serialization.hpp>
#include <boost/serialization/vector.hpp>
namespace ORB_SLAM3
{
namespace IMU
{
const float GRAVITY_VALUE=9.81;
//IMU measurement (gyro, accelerometer and timestamp)
class Point
{
public:
// Build from raw scalar components.
Point(const float &acc_x, const float &acc_y, const float &acc_z,
const float &ang_vel_x, const float &ang_vel_y, const float &ang_vel_z,
const double &timestamp): a(acc_x,acc_y,acc_z), w(ang_vel_x,ang_vel_y,ang_vel_z), t(timestamp){}
// Build from OpenCV points (accelerometer, gyroscope).
Point(const cv::Point3f Acc, const cv::Point3f Gyro, const double &timestamp):
a(Acc.x,Acc.y,Acc.z), w(Gyro.x,Gyro.y,Gyro.z), t(timestamp){}
public:
cv::Point3f a; // accelerometer reading
cv::Point3f w; // gyroscope (angular velocity) reading
double t; // timestamp
};
//IMU biases (gyro and accelerometer)
class Bias
{
// Boost serialization: archives the six bias components.
friend class boost::serialization::access;
template<class Archive>
void serialize(Archive & ar, const unsigned int version)
{
ar & bax;
ar & bay;
ar & baz;
ar & bwx;
ar & bwy;
ar & bwz;
}
public:
// Zero-initialized biases.
Bias():bax(0),bay(0),baz(0),bwx(0),bwy(0),bwz(0){}
Bias(const float &b_acc_x, const float &b_acc_y, const float &b_acc_z,
const float &b_ang_vel_x, const float &b_ang_vel_y, const float &b_ang_vel_z):
bax(b_acc_x), bay(b_acc_y), baz(b_acc_z), bwx(b_ang_vel_x), bwy(b_ang_vel_y), bwz(b_ang_vel_z){}
// Copies all components from b (defined out of line).
void CopyFrom(Bias &b);
friend std::ostream& operator<< (std::ostream &out, const Bias &b);
public:
float bax, bay, baz; // accelerometer bias
float bwx, bwy, bwz; // gyroscope bias
};
//IMU calibration (Tbc, Tcb, noise)
class Calib
{
// Boost-serialization helper for a cv::Mat: stores/loads the header
// (cols, rows, type, continuity) followed by the raw element data.
template<class Archive>
void serializeMatrix(Archive &ar, cv::Mat& mat, const unsigned int version)
{
int cols, rows, type;
bool continuous;
if (Archive::is_saving::value) {
cols = mat.cols; rows = mat.rows; type = mat.type();
continuous = mat.isContinuous();
}
ar & cols & rows & type & continuous;
if (Archive::is_loading::value)
mat.create(rows, cols, type);
if (continuous) {
const unsigned int data_size = rows * cols * mat.elemSize();
ar & boost::serialization::make_array(mat.ptr(), data_size);
} else {
// Non-continuous matrices are (de)serialized row by row.
const unsigned int row_size = cols*mat.elemSize();
for (int i = 0; i < rows; i++) {
ar & boost::serialization::make_array(mat.ptr(i), row_size);
}
}
}
friend class boost::serialization::access;
template<class Archive>
void serialize(Archive & ar, const unsigned int version)
{
serializeMatrix(ar,Tcb,version);
serializeMatrix(ar,Tbc,version);
serializeMatrix(ar,Cov,version);
serializeMatrix(ar,CovWalk,version);
}
public:
// ng/na: gyro/accelerometer measurement noise; ngw/naw: bias random walk.
Calib(const cv::Mat &Tbc_, const float &ng, const float &na, const float &ngw, const float &naw)
{
Set(Tbc_,ng,na,ngw,naw);
}
Calib(const Calib &calib);
Calib(){}
// Fills all members from the body-camera transform and the noise values.
void Set(const cv::Mat &Tbc_, const float &ng, const float &na, const float &ngw, const float &naw);
public:
cv::Mat Tcb; // camera-to-body transform
cv::Mat Tbc; // body-to-camera transform
cv::Mat Cov, CovWalk; // measurement-noise and bias-random-walk covariances
};
//Integration of 1 gyro measurement
class IntegratedRotation
{
public:
IntegratedRotation(){}
// Integrates a single bias-corrected gyro sample over `time` seconds
// (defined out of line).
IntegratedRotation(const cv::Point3f &angVel, const Bias &imuBias, const float &time);
public:
float deltaT; //integration time
cv::Mat deltaR; //integrated rotation
cv::Mat rightJ; // right jacobian
};
//Preintegration of Imu Measurements
class Preintegrated
{
// Boost-serialization helper for a cv::Mat (header + raw data); identical
// to Calib::serializeMatrix.
template<class Archive>
void serializeMatrix(Archive &ar, cv::Mat& mat, const unsigned int version)
{
int cols, rows, type;
bool continuous;
if (Archive::is_saving::value) {
cols = mat.cols; rows = mat.rows; type = mat.type();
continuous = mat.isContinuous();
}
ar & cols & rows & type & continuous;
if (Archive::is_loading::value)
mat.create(rows, cols, type);
if (continuous) {
const unsigned int data_size = rows * cols * mat.elemSize();
ar & boost::serialization::make_array(mat.ptr(), data_size);
} else {
// Non-continuous matrices are (de)serialized row by row.
const unsigned int row_size = cols*mat.elemSize();
for (int i = 0; i < rows; i++) {
ar & boost::serialization::make_array(mat.ptr(i), row_size);
}
}
}
friend class boost::serialization::access;
// Archives the full preintegration state, including the raw measurements
// (mvMeasurements) so the integration can be redone after a bias change.
template<class Archive>
void serialize(Archive & ar, const unsigned int version)
{
ar & dT;
serializeMatrix(ar,C,version);
serializeMatrix(ar,Info,version);
serializeMatrix(ar,Nga,version);
serializeMatrix(ar,NgaWalk,version);
ar & b;
serializeMatrix(ar,dR,version);
serializeMatrix(ar,dV,version);
serializeMatrix(ar,dP,version);
serializeMatrix(ar,JRg,version);
serializeMatrix(ar,JVg,version);
serializeMatrix(ar,JVa,version);
serializeMatrix(ar,JPg,version);
serializeMatrix(ar,JPa,version);
serializeMatrix(ar,avgA,version);
serializeMatrix(ar,avgW,version);
ar & bu;
serializeMatrix(ar,db,version);
ar & mvMeasurements;
}
public:
Preintegrated(const Bias &b_, const Calib &calib);
Preintegrated(Preintegrated* pImuPre);
Preintegrated() {}
~Preintegrated() {}
void CopyFrom(Preintegrated* pImuPre);
// Resets the preintegration with bias b_.
void Initialize(const Bias &b_);
// Adds one IMU sample integrated over dt.
void IntegrateNewMeasurement(const cv::Point3f &acceleration, const cv::Point3f &angVel, const float &dt);
// Re-runs the integration over the stored measurements (e.g. after a
// bias update).
void Reintegrate();
void MergePrevious(Preintegrated* pPrev);
void SetNewBias(const Bias &bu_);
IMU::Bias GetDeltaBias(const Bias &b_);
// Deltas corrected for an arbitrary bias b_ (first-order, via Jacobians).
cv::Mat GetDeltaRotation(const Bias &b_);
cv::Mat GetDeltaVelocity(const Bias &b_);
cv::Mat GetDeltaPosition(const Bias &b_);
// Deltas corrected for the currently updated bias bu.
cv::Mat GetUpdatedDeltaRotation();
cv::Mat GetUpdatedDeltaVelocity();
cv::Mat GetUpdatedDeltaPosition();
// Deltas as integrated with the original bias b.
cv::Mat GetOriginalDeltaRotation();
cv::Mat GetOriginalDeltaVelocity();
cv::Mat GetOriginalDeltaPosition();
Eigen::Matrix<double,15,15> GetInformationMatrix();
cv::Mat GetDeltaBias();
Bias GetOriginalBias();
Bias GetUpdatedBias();
public:
float dT; // total integration time
cv::Mat C; // covariance of the preintegrated terms
cv::Mat Info; // information matrix
cv::Mat Nga, NgaWalk; // measurement-noise and random-walk covariances
// Values for the original bias (when integration was computed)
Bias b;
cv::Mat dR, dV, dP; // preintegrated rotation, velocity and position deltas
cv::Mat JRg, JVg, JVa, JPg, JPa; // Jacobians of the deltas w.r.t. the biases
cv::Mat avgA; // average accelerometer reading
cv::Mat avgW; // average gyroscope reading
private:
// Updated bias
Bias bu;
// Dif between original and updated bias
// This is used to compute the updated values of the preintegration
cv::Mat db;
// One raw IMU sample kept so the integration can be redone.
struct integrable
{
integrable(const cv::Point3f &a_, const cv::Point3f &w_ , const float &t_):a(a_),w(w_),t(t_){}
cv::Point3f a;
cv::Point3f w;
float t;
};
std::vector<integrable> mvMeasurements; // all samples since Initialize()
std::mutex mMutex; // guards concurrent access (usage defined in the .cc file)
};
// Lie Algebra Functions
// cv::Mat-based SO(3) helpers (exp/log maps, right Jacobians and inverses,
// skew-symmetric matrix, rotation normalization). Declared here, defined
// out of line.
cv::Mat ExpSO3(const float &x, const float &y, const float &z);
Eigen::Matrix<double,3,3> ExpSO3(const double &x, const double &y, const double &z);
cv::Mat ExpSO3(const cv::Mat &v);
cv::Mat LogSO3(const cv::Mat &R);
cv::Mat RightJacobianSO3(const float &x, const float &y, const float &z);
cv::Mat RightJacobianSO3(const cv::Mat &v);
cv::Mat InverseRightJacobianSO3(const float &x, const float &y, const float &z);
cv::Mat InverseRightJacobianSO3(const cv::Mat &v);
cv::Mat Skew(const cv::Mat &v);
cv::Mat NormalizeRotation(const cv::Mat &R);
}
} //namespace ORB_SLAM3
#endif // IMUTYPES_H

106
include/Initializer.h Normal file
View File

@@ -0,0 +1,106 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef INITIALIZER_H
#define INITIALIZER_H
#include<opencv2/opencv.hpp>
#include "Frame.h"
#include <unordered_set>
namespace ORB_SLAM3
{
class Map;
// THIS IS THE INITIALIZER FOR MONOCULAR SLAM. NOT USED IN THE STEREO OR RGBD CASE.
class Initializer
{
// A match is a pair of keypoint indices (reference, current).
typedef pair<int,int> Match;
public:
// Fix the reference frame
Initializer(const Frame &ReferenceFrame, float sigma = 1.0, int iterations = 200);
// Computes in parallel a fundamental matrix and a homography
// Selects a model and tries to recover the motion and the structure from motion
bool Initialize(const Frame &CurrentFrame, const vector<int> &vMatches12, cv::Mat &R21,
cv::Mat &t21, vector<cv::Point3f> &vP3D, vector<bool> &vbTriangulated);
private:
// RANSAC model estimation: homography H21 and fundamental matrix F21
// between reference (1) and current (2) frames, with inlier scores.
void FindHomography(vector<bool> &vbMatchesInliers, float &score, cv::Mat &H21);
void FindFundamental(vector<bool> &vbInliers, float &score, cv::Mat &F21);
// Minimal solvers for a candidate set of correspondences.
cv::Mat ComputeH21(const vector<cv::Point2f> &vP1, const vector<cv::Point2f> &vP2);
cv::Mat ComputeF21(const vector<cv::Point2f> &vP1, const vector<cv::Point2f> &vP2);
// Symmetric-transfer-error scoring of a candidate model.
float CheckHomography(const cv::Mat &H21, const cv::Mat &H12, vector<bool> &vbMatchesInliers, float sigma);
float CheckFundamental(const cv::Mat &F21, vector<bool> &vbMatchesInliers, float sigma);
// Motion/structure recovery from the selected model.
bool ReconstructF(vector<bool> &vbMatchesInliers, cv::Mat &F21, cv::Mat &K,
cv::Mat &R21, cv::Mat &t21, vector<cv::Point3f> &vP3D, vector<bool> &vbTriangulated, float minParallax, int minTriangulated);
bool ReconstructH(vector<bool> &vbMatchesInliers, cv::Mat &H21, cv::Mat &K,
cv::Mat &R21, cv::Mat &t21, vector<cv::Point3f> &vP3D, vector<bool> &vbTriangulated, float minParallax, int minTriangulated);
void Triangulate(const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Mat &P1, const cv::Mat &P2, cv::Mat &x3D);
// Normalizes keypoints for numerically stable model estimation.
void Normalize(const vector<cv::KeyPoint> &vKeys, vector<cv::Point2f> &vNormalizedPoints, cv::Mat &T);
// void Normalize(const vector<cv::Point2f> &vKeys, vector<cv::Point2f> &vNormalizedPoints, cv::Mat &T);
// Cheirality check: counts triangulated points in front of both cameras.
int CheckRT(const cv::Mat &R, const cv::Mat &t, const vector<cv::KeyPoint> &vKeys1, const vector<cv::KeyPoint> &vKeys2,
const vector<Match> &vMatches12, vector<bool> &vbInliers,
const cv::Mat &K, vector<cv::Point3f> &vP3D, float th2, vector<bool> &vbGood, float &parallax);
// Decomposes the essential matrix into the two rotation/translation candidates.
void DecomposeE(const cv::Mat &E, cv::Mat &R1, cv::Mat &R2, cv::Mat &t);
// Keypoints from Reference Frame (Frame 1)
vector<cv::KeyPoint> mvKeys1;
// Keypoints from Current Frame (Frame 2)
vector<cv::KeyPoint> mvKeys2;
// Current Matches from Reference to Current
vector<Match> mvMatches12;
vector<bool> mvbMatched1;
// Calibration
cv::Mat mK;
// Standard Deviation and Variance
float mSigma, mSigma2;
// Ransac max iterations
int mMaxIterations;
// Ransac sets
vector<vector<size_t> > mvSets;
GeometricCamera* mpCamera; // non-owning camera model
};
} //namespace ORB_SLAM3
#endif // INITIALIZER_H

389
include/KeyFrame.h Normal file
View File

@@ -0,0 +1,389 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef KEYFRAME_H
#define KEYFRAME_H
#include "MapPoint.h"
#include "Thirdparty/DBoW2/DBoW2/BowVector.h"
#include "Thirdparty/DBoW2/DBoW2/FeatureVector.h"
#include "ORBVocabulary.h"
#include "ORBextractor.h"
#include "Frame.h"
#include "KeyFrameDatabase.h"
#include "ImuTypes.h"
#include "GeometricCamera.h"
#include <mutex>
#include <boost/serialization/base_object.hpp>
#include <boost/serialization/vector.hpp>
#include <boost/serialization/map.hpp>
namespace ORB_SLAM3
{
class Map;
class MapPoint;
class Frame;
class KeyFrameDatabase;
class GeometricCamera;
// A keyframe of the SLAM map: holds the camera pose, extracted features,
// MapPoint associations, covisibility-graph / spanning-tree links, loop and
// merge edges, and (for visual-inertial operation) IMU preintegration data.
// Declarations only; behavior is implemented in KeyFrame.cc.
class KeyFrame
{
public:
KeyFrame();
// Builds a keyframe from a tracked frame, registering it with pMap and pKFDB.
KeyFrame(Frame &F, Map* pMap, KeyFrameDatabase* pKFDB);
// Pose functions
void SetPose(const cv::Mat &Tcw);
void SetVelocity(const cv::Mat &Vw_);
cv::Mat GetPose();
cv::Mat GetPoseInverse();
cv::Mat GetCameraCenter();
cv::Mat GetImuPosition();
cv::Mat GetImuRotation();
cv::Mat GetImuPose();
cv::Mat GetStereoCenter();
cv::Mat GetRotation();
cv::Mat GetTranslation();
cv::Mat GetVelocity();
// Fixed-size cv::Matx counterparts of the cv::Mat accessors above.
cv::Matx33f GetRotation_();
cv::Matx31f GetTranslation_();
cv::Matx31f GetCameraCenter_();
cv::Matx33f GetRightRotation_();
cv::Matx31f GetRightTranslation_();
cv::Matx44f GetRightPose_();
cv::Matx31f GetRightCameraCenter_();
cv::Matx44f GetPose_();
// Bag of Words Representation
void ComputeBoW();
// Covisibility graph functions
void AddConnection(KeyFrame* pKF, const int &weight);
void EraseConnection(KeyFrame* pKF);
void UpdateConnections(bool upParent=true);
void UpdateBestCovisibles();
std::set<KeyFrame *> GetConnectedKeyFrames();
std::vector<KeyFrame* > GetVectorCovisibleKeyFrames();
std::vector<KeyFrame*> GetBestCovisibilityKeyFrames(const int &N);
std::vector<KeyFrame*> GetCovisiblesByWeight(const int &w);
int GetWeight(KeyFrame* pKF);
// Spanning tree functions
void AddChild(KeyFrame* pKF);
void EraseChild(KeyFrame* pKF);
void ChangeParent(KeyFrame* pKF);
std::set<KeyFrame*> GetChilds();
KeyFrame* GetParent();
bool hasChild(KeyFrame* pKF);
void SetFirstConnection(bool bFirst);
// Loop Edges
void AddLoopEdge(KeyFrame* pKF);
std::set<KeyFrame*> GetLoopEdges();
// Merge Edges
void AddMergeEdge(KeyFrame* pKF);
set<KeyFrame*> GetMergeEdges();
// MapPoint observation functions
int GetNumberMPs();
void AddMapPoint(MapPoint* pMP, const size_t &idx);
void EraseMapPointMatch(const int &idx);
void EraseMapPointMatch(MapPoint* pMP);
void ReplaceMapPointMatch(const int &idx, MapPoint* pMP);
std::set<MapPoint*> GetMapPoints();
std::vector<MapPoint*> GetMapPointMatches();
int TrackedMapPoints(const int &minObs);
MapPoint* GetMapPoint(const size_t &idx);
// KeyPoint functions
std::vector<size_t> GetFeaturesInArea(const float &x, const float &y, const float &r, const bool bRight = false) const;
cv::Mat UnprojectStereo(int i);
cv::Matx31f UnprojectStereo_(int i);
// Image
bool IsInImage(const float &x, const float &y) const;
// Enable/Disable bad flag changes
void SetNotErase();
void SetErase();
// Set/check bad flag
void SetBadFlag();
bool isBad();
// Compute Scene Depth (q=2 median). Used in monocular.
float ComputeSceneMedianDepth(const int q);
// Descending comparator for covisibility weights.
static bool weightComp( int a, int b){
return a>b;
}
// Ascending comparator by keyframe id.
static bool lId(KeyFrame* pKF1, KeyFrame* pKF2){
return pKF1->mnId<pKF2->mnId;
}
Map* GetMap();
void UpdateMap(Map* pMap);
// IMU bias accessors (inertial configurations).
void SetNewBias(const IMU::Bias &b);
cv::Mat GetGyroBias();
cv::Mat GetAccBias();
IMU::Bias GetImuBias();
bool ProjectPointDistort(MapPoint* pMP, cv::Point2f &kp, float &u, float &v);
bool ProjectPointUnDistort(MapPoint* pMP, cv::Point2f &kp, float &u, float &v);
void SetORBVocabulary(ORBVocabulary* pORBVoc);
void SetKeyFrameDatabase(KeyFrameDatabase* pKFDB);
bool bImu;
// The following variables are accessed from only 1 thread or never change (no mutex needed).
public:
static long unsigned int nNextId;
long unsigned int mnId;
const long unsigned int mnFrameId;
const double mTimeStamp;
// Grid (to speed up feature matching)
const int mnGridCols;
const int mnGridRows;
const float mfGridElementWidthInv;
const float mfGridElementHeightInv;
// Variables used by the tracking
long unsigned int mnTrackReferenceForFrame;
long unsigned int mnFuseTargetForKF;
// Variables used by the local mapping
long unsigned int mnBALocalForKF;
long unsigned int mnBAFixedForKF;
//Number of optimizations by BA(amount of iterations in BA)
long unsigned int mnNumberOfOpt;
// Variables used by the keyframe database
long unsigned int mnLoopQuery;
int mnLoopWords;
float mLoopScore;
long unsigned int mnRelocQuery;
int mnRelocWords;
float mRelocScore;
long unsigned int mnMergeQuery;
int mnMergeWords;
float mMergeScore;
long unsigned int mnPlaceRecognitionQuery;
int mnPlaceRecognitionWords;
float mPlaceRecognitionScore;
bool mbCurrentPlaceRecognition;
// Variables used by loop closing
cv::Mat mTcwGBA;
cv::Mat mTcwBefGBA;
cv::Mat mVwbGBA;
cv::Mat mVwbBefGBA;
IMU::Bias mBiasGBA;
long unsigned int mnBAGlobalForKF;
// Variables used by merging
cv::Mat mTcwMerge;
cv::Mat mTcwBefMerge;
cv::Mat mTwcBefMerge;
cv::Mat mVwbMerge;
cv::Mat mVwbBefMerge;
IMU::Bias mBiasMerge;
long unsigned int mnMergeCorrectedForKF;
long unsigned int mnMergeForKF;
float mfScaleMerge;
long unsigned int mnBALocalForMerge;
float mfScale;
// Calibration parameters
const float fx, fy, cx, cy, invfx, invfy, mbf, mb, mThDepth;
cv::Mat mDistCoef;
// Number of KeyPoints
const int N;
// KeyPoints, stereo coordinate and descriptors (all associated by an index)
const std::vector<cv::KeyPoint> mvKeys;
const std::vector<cv::KeyPoint> mvKeysUn;
const std::vector<float> mvuRight; // negative value for monocular points
const std::vector<float> mvDepth; // negative value for monocular points
const cv::Mat mDescriptors;
//BoW
DBoW2::BowVector mBowVec;
DBoW2::FeatureVector mFeatVec;
// Pose relative to parent (this is computed when bad flag is activated)
cv::Mat mTcp;
// Scale
const int mnScaleLevels;
const float mfScaleFactor;
const float mfLogScaleFactor;
const std::vector<float> mvScaleFactors;
const std::vector<float> mvLevelSigma2;
const std::vector<float> mvInvLevelSigma2;
// Image bounds and calibration
const int mnMinX;
const int mnMinY;
const int mnMaxX;
const int mnMaxY;
const cv::Mat mK;
// Preintegrated IMU measurements from previous keyframe
KeyFrame* mPrevKF;
KeyFrame* mNextKF;
IMU::Preintegrated* mpImuPreintegrated;
IMU::Calib mImuCalib;
unsigned int mnOriginMapId;
string mNameFile;
int mnDataset;
std::vector <KeyFrame*> mvpLoopCandKFs;
std::vector <KeyFrame*> mvpMergeCandKFs;
bool mbHasHessian;
cv::Mat mHessianPose;
// The following variables need to be accessed through a mutex to be thread safe.
protected:
// SE3 Pose and camera center
cv::Mat Tcw;
cv::Mat Twc;
cv::Mat Ow;
cv::Mat Cw; // Stereo middle point. Only for visualization
cv::Matx44f Tcw_, Twc_, Tlr_;
cv::Matx31f Ow_;
// IMU position
cv::Mat Owb;
// Velocity (Only used for inertial SLAM)
cv::Mat Vw;
// Imu bias
IMU::Bias mImuBias;
// MapPoints associated to keypoints
std::vector<MapPoint*> mvpMapPoints;
// BoW
KeyFrameDatabase* mpKeyFrameDB;
ORBVocabulary* mpORBvocabulary;
// Grid over the image to speed up feature matching
std::vector< std::vector <std::vector<size_t> > > mGrid;
std::map<KeyFrame*,int> mConnectedKeyFrameWeights;
std::vector<KeyFrame*> mvpOrderedConnectedKeyFrames;
std::vector<int> mvOrderedWeights;
// Spanning Tree and Loop Edges
bool mbFirstConnection;
KeyFrame* mpParent;
std::set<KeyFrame*> mspChildrens;
std::set<KeyFrame*> mspLoopEdges;
std::set<KeyFrame*> mspMergeEdges;
// Bad flags
bool mbNotErase;
bool mbToBeErased;
bool mbBad;
float mHalfBaseline; // Only for visualization
Map* mpMap;
std::mutex mMutexPose; // for pose, velocity and biases
std::mutex mMutexConnections;
std::mutex mMutexFeatures;
std::mutex mMutexMap;
public:
GeometricCamera* mpCamera, *mpCamera2;
//Indexes of stereo observations correspondences
std::vector<int> mvLeftToRightMatch, mvRightToLeftMatch;
//Transformation matrix between cameras in stereo fisheye
cv::Mat mTlr;
cv::Mat mTrl;
//KeyPoints in the right image (for stereo fisheye, coordinates are needed)
const std::vector<cv::KeyPoint> mvKeysRight;
const int NLeft, NRight;
std::vector< std::vector <std::vector<size_t> > > mGridRight;
cv::Mat GetRightPose();
cv::Mat GetRightPoseInverse();
cv::Mat GetRightPoseInverseH();
cv::Mat GetRightCameraCenter();
cv::Mat GetRightRotation();
cv::Mat GetRightTranslation();
cv::Mat imgLeft, imgRight;
// Debug helper: prints how many associated MapPoints were observed in the
// left vs. right image (split at NLeft when stereo-fisheye, i.e. NLeft != -1).
void PrintPointDistribution(){
int left = 0, right = 0;
int Nlim = (NLeft != -1) ? NLeft : N;
for(int i = 0; i < N; i++){
if(mvpMapPoints[i]){
if(i < Nlim) left++;
else right++;
}
}
cout << "Point distribution in KeyFrame: left-> " << left << " --- right-> " << right << endl;
}
};
} //namespace ORB_SLAM
#endif // KEYFRAME_H

88
include/KeyFrameDatabase.h Normal file
View File

@@ -0,0 +1,88 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef KEYFRAMEDATABASE_H
#define KEYFRAMEDATABASE_H
#include <vector>
#include <list>
#include <set>
#include "KeyFrame.h"
#include "Frame.h"
#include "ORBVocabulary.h"
#include "Map.h"
#include <boost/serialization/base_object.hpp>
#include <boost/serialization/vector.hpp>
#include <boost/serialization/list.hpp>
#include<mutex>
namespace ORB_SLAM3
{
class KeyFrame;
class Frame;
class Map;
// Inverted-file index of keyframes keyed by vocabulary words, used to
// retrieve loop-closure, map-merge and relocalization candidates.
// Declarations only; behavior is implemented in KeyFrameDatabase.cc.
class KeyFrameDatabase
{
public:
KeyFrameDatabase(const ORBVocabulary &voc);
// Index maintenance.
void add(KeyFrame* pKF);
void erase(KeyFrame* pKF);
void clear();
// Removes from the index every keyframe belonging to pMap.
void clearMap(Map* pMap);
// Loop Detection(DEPRECATED)
std::vector<KeyFrame *> DetectLoopCandidates(KeyFrame* pKF, float minScore);
// Loop and Merge Detection
void DetectCandidates(KeyFrame* pKF, float minScore,vector<KeyFrame*>& vpLoopCand, vector<KeyFrame*>& vpMergeCand);
void DetectBestCandidates(KeyFrame *pKF, vector<KeyFrame*> &vpLoopCand, vector<KeyFrame*> &vpMergeCand, int nMinWords);
void DetectNBestCandidates(KeyFrame *pKF, vector<KeyFrame*> &vpLoopCand, vector<KeyFrame*> &vpMergeCand, int nNumCandidates);
// Relocalization
std::vector<KeyFrame*> DetectRelocalizationCandidates(Frame* F, Map* pMap);
void SetORBVocabulary(ORBVocabulary* pORBVoc);
protected:
// Associated vocabulary (not owned)
const ORBVocabulary* mpVoc;
// Inverted file: one keyframe list per vocabulary word id
std::vector<list<KeyFrame*> > mvInvertedFile;
// Mutex guarding the inverted file
std::mutex mMutex;
};
} //namespace ORB_SLAM
#endif

201
include/LocalMapping.h Normal file
View File

@@ -0,0 +1,201 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef LOCALMAPPING_H
#define LOCALMAPPING_H
#include "KeyFrame.h"
#include "Atlas.h"
#include "LoopClosing.h"
#include "Tracking.h"
#include "KeyFrameDatabase.h"
#include "Initializer.h"
#include <mutex>
namespace ORB_SLAM3
{
class System;
class Tracking;
class LoopClosing;
class Atlas;
// Local mapping thread: consumes new keyframes from Tracking, triangulates
// new MapPoints, culls redundant points/keyframes, runs local BA and (in
// inertial configurations) initializes/refines the IMU. Declarations only;
// behavior is implemented in LocalMapping.cc.
class LocalMapping
{
public:
// NOTE(review): bMonocular is declared float but named like a bool — confirm
// the intended type against LocalMapping.cc and its callers.
LocalMapping(System* pSys, Atlas* pAtlas, const float bMonocular, bool bInertial, const string &_strSeqName=std::string());
void SetLoopCloser(LoopClosing* pLoopCloser);
void SetTracker(Tracking* pTracker);
// Main function
void Run();
void InsertKeyFrame(KeyFrame* pKF);
void EmptyQueue();
// Thread Synch
void RequestStop();
void RequestReset();
void RequestResetActiveMap(Map* pMap);
bool Stop();
void Release();
bool isStopped();
bool stopRequested();
bool AcceptKeyFrames();
void SetAcceptKeyFrames(bool flag);
bool SetNotStop(bool flag);
void InterruptBA();
void RequestFinish();
bool isFinished();
// Number of keyframes waiting to be processed (thread-safe snapshot).
int KeyframesInQueue(){
unique_lock<std::mutex> lock(mMutexNewKFs);
return mlNewKeyFrames.size();
}
bool IsInitializing();
double GetCurrKFTime();
KeyFrame* GetCurrKF();
// State shared with other threads during IMU initialization.
std::mutex mMutexImuInit;
Eigen::MatrixXd mcovInertial;
Eigen::Matrix3d mRwg;
Eigen::Vector3d mbg;
Eigen::Vector3d mba;
double mScale;
double mInitTime;
double mCostTime;
bool mbNewInit;
unsigned int mInitSect;
unsigned int mIdxInit;
unsigned int mnKFs;
double mFirstTs;
int mnMatchesInliers;
bool mbNotBA1;
bool mbNotBA2;
bool mbBadImu;
bool mbWriteStats;
// not consider far points (clouds)
bool mbFarPoints;
float mThFarPoints;
#ifdef REGISTER_TIMES
vector<double> vdKFInsert_ms;
vector<double> vdMPCulling_ms;
vector<double> vdMPCreation_ms;
vector<double> vdLBA_ms;
vector<double> vdKFCulling_ms;
vector<double> vdLMTotal_ms;
vector<double> vdLBASync_ms;
vector<double> vdKFCullingSync_ms;
vector<int> vnLBA_edges;
vector<int> vnLBA_KFopt;
vector<int> vnLBA_KFfixed;
vector<int> vnLBA_MPs;
int nLBA_exec;
int nLBA_abort;
#endif
protected:
// One pass of the mapping pipeline, in the order Run() presumably invokes them.
bool CheckNewKeyFrames();
void ProcessNewKeyFrame();
void CreateNewMapPoints();
void MapPointCulling();
void SearchInNeighbors();
void KeyFrameCulling();
// Fundamental matrix between two keyframes (cv::Mat and cv::Matx variants).
cv::Mat ComputeF12(KeyFrame* &pKF1, KeyFrame* &pKF2);
cv::Matx33f ComputeF12_(KeyFrame* &pKF1, KeyFrame* &pKF2);
cv::Mat SkewSymmetricMatrix(const cv::Mat &v);
cv::Matx33f SkewSymmetricMatrix_(const cv::Matx31f &v);
System *mpSystem;
bool mbMonocular;
bool mbInertial;
void ResetIfRequested();
bool mbResetRequested;
bool mbResetRequestedActiveMap;
Map* mpMapToReset;
std::mutex mMutexReset;
bool CheckFinish();
void SetFinish();
bool mbFinishRequested;
bool mbFinished;
std::mutex mMutexFinish;
Atlas* mpAtlas;
LoopClosing* mpLoopCloser;
Tracking* mpTracker;
// Queue of keyframes inserted by Tracking, guarded by mMutexNewKFs.
std::list<KeyFrame*> mlNewKeyFrames;
KeyFrame* mpCurrentKeyFrame;
std::list<MapPoint*> mlpRecentAddedMapPoints;
std::mutex mMutexNewKFs;
bool mbAbortBA;
bool mbStopped;
bool mbStopRequested;
bool mbNotStop;
std::mutex mMutexStop;
bool mbAcceptKeyFrames;
std::mutex mMutexAccept;
void InitializeIMU(float priorG = 1e2, float priorA = 1e6, bool bFirst = false);
void ScaleRefinement();
bool bInitializing;
Eigen::MatrixXd infoInertial;
int mNumLM;
int mNumKFCulling;
float mTinit;
int countRefinement;
//DEBUG
ofstream f_lm;
};
} //namespace ORB_SLAM
#endif // LOCALMAPPING_H

226
include/LoopClosing.h Normal file
View File

@@ -0,0 +1,226 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef LOOPCLOSING_H
#define LOOPCLOSING_H
#include "KeyFrame.h"
#include "LocalMapping.h"
#include "Atlas.h"
#include "ORBVocabulary.h"
#include "Tracking.h"
#include "Config.h"
#include "KeyFrameDatabase.h"
#include <boost/algorithm/string.hpp>
#include <thread>
#include <mutex>
#include "Thirdparty/g2o/g2o/types/types_seven_dof_expmap.h"
namespace ORB_SLAM3
{
class Tracking;
class LocalMapping;
class KeyFrameDatabase;
class Map;
// Loop-closing thread: detects revisited places and map overlaps, computes
// the relative Sim3, corrects the loop or merges maps, and launches global
// bundle adjustment in a separate thread. Declarations only; behavior is
// implemented in LoopClosing.cc.
class LoopClosing
{
public:
typedef pair<set<KeyFrame*>,int> ConsistentGroup;
// Keyframe -> Sim3 pose map; aligned allocator required because g2o::Sim3
// contains fixed-size Eigen members.
typedef map<KeyFrame*,g2o::Sim3,std::less<KeyFrame*>,
Eigen::aligned_allocator<std::pair<KeyFrame* const, g2o::Sim3> > > KeyFrameAndPose;
public:
LoopClosing(Atlas* pAtlas, KeyFrameDatabase* pDB, ORBVocabulary* pVoc,const bool bFixScale);
void SetTracker(Tracking* pTracker);
void SetLocalMapper(LocalMapping* pLocalMapper);
// Main function
void Run();
void InsertKeyFrame(KeyFrame *pKF);
void RequestReset();
void RequestResetActiveMap(Map* pMap);
// This function will run in a separate thread
void RunGlobalBundleAdjustment(Map* pActiveMap, unsigned long nLoopKF);
bool isRunningGBA(){
unique_lock<std::mutex> lock(mMutexGBA);
return mbRunningGBA;
}
bool isFinishedGBA(){
unique_lock<std::mutex> lock(mMutexGBA);
return mbFinishedGBA;
}
void RequestFinish();
bool isFinished();
Viewer* mpViewer;
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
#ifdef REGISTER_TIMES
double timeDetectBoW;
std::vector<double> vTimeBoW_ms;
std::vector<double> vTimeSE3_ms;
std::vector<double> vTimePRTotal_ms;
std::vector<double> vTimeLoopFusion_ms;
std::vector<double> vTimeLoopEssent_ms;
std::vector<double> vTimeLoopTotal_ms;
std::vector<double> vTimeMergeFusion_ms;
std::vector<double> vTimeMergeBA_ms;
std::vector<double> vTimeMergeTotal_ms;
std::vector<double> vTimeFullGBA_ms;
std::vector<double> vTimeMapUpdate_ms;
std::vector<double> vTimeGBATotal_ms;
#endif
protected:
bool CheckNewKeyFrames();
//Methods to implement the new place recognition algorithm
bool NewDetectCommonRegions();
// NOTE(review): "Reffine" is a misspelling of "Refine"; renaming requires a
// coordinated change in LoopClosing.cc, so it is only flagged here.
bool DetectAndReffineSim3FromLastKF(KeyFrame* pCurrentKF, KeyFrame* pMatchedKF, g2o::Sim3 &gScw, int &nNumProjMatches,
std::vector<MapPoint*> &vpMPs, std::vector<MapPoint*> &vpMatchedMPs);
bool DetectCommonRegionsFromBoW(std::vector<KeyFrame*> &vpBowCand, KeyFrame* &pMatchedKF, KeyFrame* &pLastCurrentKF, g2o::Sim3 &g2oScw,
int &nNumCoincidences, std::vector<MapPoint*> &vpMPs, std::vector<MapPoint*> &vpMatchedMPs);
bool DetectCommonRegionsFromLastKF(KeyFrame* pCurrentKF, KeyFrame* pMatchedKF, g2o::Sim3 &gScw, int &nNumProjMatches,
std::vector<MapPoint*> &vpMPs, std::vector<MapPoint*> &vpMatchedMPs);
int FindMatchesByProjection(KeyFrame* pCurrentKF, KeyFrame* pMatchedKFw, g2o::Sim3 &g2oScw,
set<MapPoint*> &spMatchedMPinOrigin, vector<MapPoint*> &vpMapPoints,
vector<MapPoint*> &vpMatchedMapPoints);
void SearchAndFuse(const KeyFrameAndPose &CorrectedPosesMap, vector<MapPoint*> &vpMapPoints);
void SearchAndFuse(const vector<KeyFrame*> &vConectedKFs, vector<MapPoint*> &vpMapPoints);
void CorrectLoop();
void MergeLocal();
void MergeLocal2();
void ResetIfRequested();
bool mbResetRequested;
bool mbResetActiveMapRequested;
Map* mpMapToReset;
std::mutex mMutexReset;
bool CheckFinish();
void SetFinish();
bool mbFinishRequested;
bool mbFinished;
std::mutex mMutexFinish;
Atlas* mpAtlas;
Tracking* mpTracker;
KeyFrameDatabase* mpKeyFrameDB;
ORBVocabulary* mpORBVocabulary;
LocalMapping *mpLocalMapper;
// Queue of keyframes to check, guarded by mMutexLoopQueue.
std::list<KeyFrame*> mlpLoopKeyFrameQueue;
std::mutex mMutexLoopQueue;
// Loop detector parameters
float mnCovisibilityConsistencyTh;
// Loop detector variables
KeyFrame* mpCurrentKF;
KeyFrame* mpLastCurrentKF;
KeyFrame* mpMatchedKF;
std::vector<ConsistentGroup> mvConsistentGroups;
std::vector<KeyFrame*> mvpEnoughConsistentCandidates;
std::vector<KeyFrame*> mvpCurrentConnectedKFs;
std::vector<MapPoint*> mvpCurrentMatchedPoints;
std::vector<MapPoint*> mvpLoopMapPoints;
cv::Mat mScw;
g2o::Sim3 mg2oScw;
//-------
// State of the place-recognition attempt against the active map (loop).
Map* mpLastMap;
bool mbLoopDetected;
int mnLoopNumCoincidences;
int mnLoopNumNotFound;
KeyFrame* mpLoopLastCurrentKF;
g2o::Sim3 mg2oLoopSlw;
g2o::Sim3 mg2oLoopScw;
KeyFrame* mpLoopMatchedKF;
std::vector<MapPoint*> mvpLoopMPs;
std::vector<MapPoint*> mvpLoopMatchedMPs;
// State of the place-recognition attempt against another map (merge).
bool mbMergeDetected;
int mnMergeNumCoincidences;
int mnMergeNumNotFound;
KeyFrame* mpMergeLastCurrentKF;
g2o::Sim3 mg2oMergeSlw;
g2o::Sim3 mg2oMergeSmw;
g2o::Sim3 mg2oMergeScw;
KeyFrame* mpMergeMatchedKF;
std::vector<MapPoint*> mvpMergeMPs;
std::vector<MapPoint*> mvpMergeMatchedMPs;
std::vector<KeyFrame*> mvpMergeConnectedKFs;
g2o::Sim3 mSold_new;
//-------
long unsigned int mLastLoopKFid;
// Variables related to Global Bundle Adjustment
bool mbRunningGBA;
bool mbFinishedGBA;
bool mbStopGBA;
std::mutex mMutexGBA;
std::thread* mpThreadGBA;
// Fix scale in the stereo/RGB-D case
bool mbFixScale;
// NOTE(review): declared bool but named like an index ("Idx") — confirm the
// intended type against its uses in LoopClosing.cc before changing.
bool mnFullBAIdx;
vector<double> vdPR_CurrentTime;
vector<double> vdPR_MatchedTime;
vector<int> vnPR_TypeRecogn;
};
} //namespace ORB_SLAM
#endif // LOOPCLOSING_H

254
include/MLPnPsolver.h Normal file
View File

@@ -0,0 +1,254 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
/******************************************************************************
* Author: Steffen Urban *
* Contact: urbste@gmail.com *
* License: Copyright (c) 2016 Steffen Urban, ANU. All rights reserved. *
* *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* * Redistributions of source code must retain the above copyright *
* notice, this list of conditions and the following disclaimer. *
* * Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* * Neither the name of ANU nor the names of its contributors may be *
* used to endorse or promote products derived from this software without *
* specific prior written permission. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"*
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE *
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE *
* ARE DISCLAIMED. IN NO EVENT SHALL ANU OR THE CONTRIBUTORS BE LIABLE *
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL *
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR *
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER *
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT *
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY *
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF *
* SUCH DAMAGE. *
******************************************************************************/
#ifndef ORB_SLAM3_MLPNPSOLVER_H
#define ORB_SLAM3_MLPNPSOLVER_H
#include "MapPoint.h"
#include "Frame.h"
#include<Eigen/Dense>
#include<Eigen/Sparse>
namespace ORB_SLAM3{
// RANSAC wrapper around the MLPnP (Maximum Likelihood PnP) algorithm:
// estimates the camera pose from 2D-3D correspondences of a Frame.
// Declarations only; behavior is implemented in MLPnPsolver.cpp.
class MLPnPsolver {
public:
MLPnPsolver(const Frame &F, const vector<MapPoint*> &vpMapPointMatches);
~MLPnPsolver();
void SetRansacParameters(double probability = 0.99, int minInliers = 8, int maxIterations = 300, int minSet = 6, float epsilon = 0.4,
float th2 = 5.991);
// Runs up to nIterations RANSAC iterations; returns the estimated pose
// (empty on failure — confirm in MLPnPsolver.cpp) and reports inliers.
cv::Mat iterate(int nIterations, bool &bNoMore, vector<bool> &vbInliers, int &nInliers);
//Type definitions needed by the original code
/** A 3-vector of unit length used to describe landmark observations/bearings
* in camera frames (always expressed in camera frames)
*/
typedef Eigen::Vector3d bearingVector_t;
/** An array of bearing-vectors */
typedef std::vector<bearingVector_t, Eigen::aligned_allocator<bearingVector_t> >
bearingVectors_t;
/** A 2-matrix containing the 2D covariance information of a bearing vector
*/
typedef Eigen::Matrix2d cov2_mat_t;
/** A 3-matrix containing the 3D covariance information of a bearing vector */
typedef Eigen::Matrix3d cov3_mat_t;
/** An array of 3D covariance matrices */
typedef std::vector<cov3_mat_t, Eigen::aligned_allocator<cov3_mat_t> >
cov3_mats_t;
/** A 3-vector describing a point in 3D-space */
typedef Eigen::Vector3d point_t;
/** An array of 3D-points */
typedef std::vector<point_t, Eigen::aligned_allocator<point_t> >
points_t;
/** A homogeneous 3-vector describing a point in 3D-space */
typedef Eigen::Vector4d point4_t;
/** An array of homogeneous 3D-points */
typedef std::vector<point4_t, Eigen::aligned_allocator<point4_t> >
points4_t;
/** A 3-vector containing the rodrigues parameters of a rotation matrix */
typedef Eigen::Vector3d rodrigues_t;
/** A rotation matrix */
typedef Eigen::Matrix3d rotation_t;
/** A 3x4 transformation matrix containing rotation \f$ \mathbf{R} \f$ and
* translation \f$ \mathbf{t} \f$ as follows:
* \f$ \left( \begin{array}{cc} \mathbf{R} & \mathbf{t} \end{array} \right) \f$
*/
typedef Eigen::Matrix<double,3,4> transformation_t;
/** A 3-vector describing a translation/camera position */
typedef Eigen::Vector3d translation_t;
private:
void CheckInliers();
bool Refine();
//Functions from the original MLPnP code
/*
* Computes the camera pose given 3D points coordinates (in the camera reference
* system), the camera rays and (optionally) the covariance matrix of those camera rays.
* Result is stored in solution
*/
void computePose(
const bearingVectors_t & f,
const points_t & p,
const cov3_mats_t & covMats,
const std::vector<int>& indices,
transformation_t & result);
// Gauss-Newton refinement of the pose parameters x.
void mlpnp_gn(Eigen::VectorXd& x,
const points_t& pts,
const std::vector<Eigen::MatrixXd>& nullspaces,
const Eigen::SparseMatrix<double> Kll,
bool use_cov);
// Evaluates residuals r and (optionally, when getJacs) the Jacobian fjac.
void mlpnp_residuals_and_jacs(
const Eigen::VectorXd& x,
const points_t& pts,
const std::vector<Eigen::MatrixXd>& nullspaces,
Eigen::VectorXd& r,
Eigen::MatrixXd& fjac,
bool getJacs);
void mlpnpJacs(
const point_t& pt,
const Eigen::Vector3d& nullspace_r,
const Eigen::Vector3d& nullspace_s,
const rodrigues_t& w,
const translation_t& t,
Eigen::MatrixXd& jacs);
//Auxiliary methods
/**
* \brief Compute a rotation matrix from Rodrigues axis angle.
*
* \param[in] omega The Rodrigues-parameters of a rotation.
* \return The 3x3 rotation matrix.
*/
Eigen::Matrix3d rodrigues2rot(const Eigen::Vector3d & omega);
/**
* \brief Compute the Rodrigues-parameters of a rotation matrix.
*
* \param[in] R The 3x3 rotation matrix.
* \return The Rodrigues-parameters.
*/
Eigen::Vector3d rot2rodrigues(const Eigen::Matrix3d & R);
//----------------------------------------------------
//Fields of the solver
//----------------------------------------------------
vector<MapPoint*> mvpMapPointMatches;
// 2D Points
vector<cv::Point2f> mvP2D;
//Substituted by bearing vectors
bearingVectors_t mvBearingVecs;
vector<float> mvSigma2;
// 3D Points
//vector<cv::Point3f> mvP3Dw;
points_t mvP3Dw;
// Index in Frame
vector<size_t> mvKeyPointIndices;
// Current Estimation
double mRi[3][3];
double mti[3];
cv::Mat mTcwi;
vector<bool> mvbInliersi;
int mnInliersi;
// Current Ransac State
int mnIterations;
vector<bool> mvbBestInliers;
int mnBestInliers;
cv::Mat mBestTcw;
// Refined
cv::Mat mRefinedTcw;
vector<bool> mvbRefinedInliers;
int mnRefinedInliers;
// Number of Correspondences
int N;
// Indices for random selection [0 .. N-1]
vector<size_t> mvAllIndices;
// RANSAC probability
double mRansacProb;
// RANSAC min inliers
int mRansacMinInliers;
// RANSAC max iterations
int mRansacMaxIts;
// RANSAC expected inliers/total ratio
float mRansacEpsilon;
// RANSAC Threshold inlier/outlier. Max error e = dist(P1,T_12*P2)^2
float mRansacTh;
// RANSAC Minimum Set used at each iteration
int mRansacMinSet;
// Max square error associated with scale level. Max error = th*th*sigma(level)*sigma(level)
vector<float> mvMaxError;
// Camera model of the frame (not owned by this class).
GeometricCamera* mpCamera;
};
}
#endif //ORB_SLAM3_MLPNPSOLVER_H

165
include/Map.h Normal file
View File

@@ -0,0 +1,165 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef MAP_H
#define MAP_H
#include "MapPoint.h"
#include "KeyFrame.h"
#include <set>
#include <pangolin/pangolin.h>
#include <mutex>
#include <boost/serialization/base_object.hpp>
namespace ORB_SLAM3
{
class MapPoint;
class KeyFrame;
class Atlas;
class KeyFrameDatabase;
class GeometricCamera;
// A single SLAM map inside the Atlas: owns sets of KeyFrames and MapPoints
// plus bookkeeping for map changes, IMU initialization state and thumbnail
// rendering. Declarations only; behavior is implemented in Map.cc.
class Map
{
public:
Map();
Map(int initKFid);
~Map();
void AddKeyFrame(KeyFrame* pKF);
void AddMapPoint(MapPoint* pMP);
void EraseMapPoint(MapPoint* pMP);
void EraseKeyFrame(KeyFrame* pKF);
void SetReferenceMapPoints(const std::vector<MapPoint*> &vpMPs);
void InformNewBigChange();
int GetLastBigChangeIdx();
std::vector<KeyFrame*> GetAllKeyFrames();
std::vector<MapPoint*> GetAllMapPoints();
std::vector<MapPoint*> GetReferenceMapPoints();
long unsigned int MapPointsInMap();
long unsigned KeyFramesInMap();
long unsigned int GetId();
long unsigned int GetInitKFid();
void SetInitKFid(long unsigned int initKFif);
long unsigned int GetMaxKFid();
KeyFrame* GetOriginKF();
// Atlas state: whether this is the active map or a stored one.
void SetCurrentMap();
void SetStoredMap();
bool HasThumbnail();
bool IsInUse();
void SetBad();
bool IsBad();
void clear();
int GetMapChangeIndex();
void IncreaseChangeIndex();
int GetLastMapChange();
void SetLastMapChange(int currentChangeId);
void SetImuInitialized();
bool isImuInitialized();
// Rigid/similarity transforms applied to the whole map (e.g. gravity
// alignment and scale correction in inertial SLAM).
void RotateMap(const cv::Mat &R);
void ApplyScaledRotation(const cv::Mat &R, const float s, const bool bScaledVel=false, const cv::Mat t=cv::Mat::zeros(cv::Size(1,3),CV_32F));
void SetInertialSensor();
bool IsInertial();
// NOTE(review): "Iniertial" is a misspelling of "Inertial"; renaming needs a
// coordinated change in Map.cc and all callers, so it is only flagged here.
void SetIniertialBA1();
void SetIniertialBA2();
bool GetIniertialBA1();
bool GetIniertialBA2();
void PrintEssentialGraph();
bool CheckEssentialGraph();
void ChangeId(long unsigned int nId);
unsigned int GetLowerKFID();
vector<KeyFrame*> mvpKeyFrameOrigins;
vector<unsigned long int> mvBackupKeyFrameOriginsId;
KeyFrame* mpFirstRegionKF;
std::mutex mMutexMapUpdate;
// This avoid that two points are created simultaneously in separate threads (id conflict)
std::mutex mMutexPointCreation;
bool mbFail;
// Size of the thumbnail (always in power of 2)
static const int THUMB_WIDTH = 512;
static const int THUMB_HEIGHT = 512;
static long unsigned int nNextId;
protected:
long unsigned int mnId;
std::set<MapPoint*> mspMapPoints;
std::set<KeyFrame*> mspKeyFrames;
KeyFrame* mpKFinitial;
KeyFrame* mpKFlowerID;
std::vector<MapPoint*> mvpReferenceMapPoints;
bool mbImuInitialized;
int mnMapChange;
int mnMapChangeNotified;
long unsigned int mnInitKFid;
long unsigned int mnMaxKFid;
long unsigned int mnLastLoopKFid;
// Index related to a big change in the map (loop closure, global BA)
int mnBigChangeIdx;
// View of the map in aerial sight (for the AtlasViewer)
GLubyte* mThumbnail;
bool mIsInUse;
// NOTE(review): "Tumbnail" is a misspelling of "Thumbnail" — flagged only,
// since renaming requires touching Map.cc as well.
bool mHasTumbnail;
bool mbBad = false;
bool mbIsInertial;
bool mbIMU_BA1;
bool mbIMU_BA2;
std::mutex mMutexMap;
};
} //namespace ORB_SLAM3
#endif // MAP_H

73
include/MapDrawer.h Normal file
View File

@@ -0,0 +1,73 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef MAPDRAWER_H
#define MAPDRAWER_H
#include"Atlas.h"
#include"MapPoint.h"
#include"KeyFrame.h"
#include<pangolin/pangolin.h>
#include<mutex>
namespace ORB_SLAM3
{
// Draws the Atlas content (map points, keyframes, graphs, current camera)
// using Pangolin/OpenGL.
class MapDrawer
{
public:
// strSettingPath: path of the settings file the drawing parameters are read from.
MapDrawer(Atlas* pAtlas, const string &strSettingPath);

Atlas* mpAtlas;

void DrawMapPoints();
void DrawKeyFrames(const bool bDrawKF, const bool bDrawGraph, const bool bDrawInertialGraph);
void DrawCurrentCamera(pangolin::OpenGlMatrix &Twc);
// Set the current camera pose (world-to-camera), presumably guarded by
// mMutexCamera — confirm in MapDrawer.cc.
void SetCurrentCameraPose(const cv::Mat &Tcw);
void SetReferenceKeyFrame(KeyFrame *pKF);
// Convert the stored camera pose to OpenGL matrices (M: pose, MOw: camera
// center; MTwwp: additional matrix, see MapDrawer.cc for its semantics).
void GetCurrentOpenGLCameraMatrix(pangolin::OpenGlMatrix &M, pangolin::OpenGlMatrix &MOw);
void GetCurrentOpenGLCameraMatrix(pangolin::OpenGlMatrix &M, pangolin::OpenGlMatrix &MOw, pangolin::OpenGlMatrix &MTwwp);

private:

bool ParseViewerParamFile(cv::FileStorage &fSettings);

// Drawing sizes/widths (read from the viewer settings).
float mKeyFrameSize;
float mKeyFrameLineWidth;
float mGraphLineWidth;
float mPointSize;
float mCameraSize;
float mCameraLineWidth;

cv::Mat mCameraPose;

std::mutex mMutexCamera;

// Fixed color palette (RGB triples in [0,1]).
float mfFrameColors[6][3] = {{0.0f, 0.0f, 1.0f},
{0.8f, 0.4f, 1.0f},
{1.0f, 0.2f, 0.4f},
{0.6f, 0.0f, 1.0f},
{1.0f, 1.0f, 0.0f},
{0.0f, 1.0f, 1.0f}};
};
} //namespace ORB_SLAM
#endif // MAPDRAWER_H

187
include/MapPoint.h Normal file
View File

@@ -0,0 +1,187 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef MAPPOINT_H
#define MAPPOINT_H
#include"KeyFrame.h"
#include"Frame.h"
#include"Map.h"
#include<opencv2/core/core.hpp>
#include<mutex>
#include <boost/serialization/serialization.hpp>
#include <boost/serialization/array.hpp>
#include <boost/serialization/map.hpp>
namespace ORB_SLAM3
{
class KeyFrame;
class Map;
class Frame;
// A 3D landmark: world position, mean viewing direction, representative ORB
// descriptor and the set of keyframes observing it.
class MapPoint
{

public:
MapPoint();

// Construct from a world position and a reference keyframe.
MapPoint(const cv::Mat &Pos, KeyFrame* pRefKF, Map* pMap);
// Construct from an inverse-depth parametrization anchored at a host keyframe.
MapPoint(const double invDepth, cv::Point2f uv_init, KeyFrame* pRefKF, KeyFrame* pHostKF, Map* pMap);
// Construct from a world position and the frame/feature index that produced it.
MapPoint(const cv::Mat &Pos, Map* pMap, Frame* pFrame, const int &idxF);

void SetWorldPos(const cv::Mat &Pos);

cv::Mat GetWorldPos();
cv::Mat GetNormal();
// cv::Matx variants of the two accessors above.
cv::Matx31f GetWorldPos2();
cv::Matx31f GetNormal2();

KeyFrame* GetReferenceKeyFrame();

// Observing keyframes mapped to a pair of feature indices (presumably
// left/right indices for stereo — confirm in MapPoint.cc).
std::map<KeyFrame*,std::tuple<int,int>> GetObservations();
int Observations();

void AddObservation(KeyFrame* pKF,int idx);
void EraseObservation(KeyFrame* pKF);

std::tuple<int,int> GetIndexInKeyFrame(KeyFrame* pKF);
bool IsInKeyFrame(KeyFrame* pKF);

void SetBadFlag();
bool isBad();

// Substitute this point by pMP everywhere (point fusion).
void Replace(MapPoint* pMP);
MapPoint* GetReplaced();

void IncreaseVisible(int n=1);
void IncreaseFound(int n=1);
// Ratio between the mnFound and mnVisible counters.
float GetFoundRatio();
inline int GetFound(){
return mnFound;
}

// Recompute the representative descriptor from all observations.
void ComputeDistinctiveDescriptors();

cv::Mat GetDescriptor();

void UpdateNormalAndDepth();
void SetNormalVector(cv::Mat& normal);

// Scale-invariance distance bounds and scale prediction from a viewing distance.
float GetMinDistanceInvariance();
float GetMaxDistanceInvariance();
int PredictScale(const float &currentDist, KeyFrame*pKF);
int PredictScale(const float &currentDist, Frame* pF);

Map* GetMap();
void UpdateMap(Map* pMap);

public:
long unsigned int mnId;
static long unsigned int nNextId;
long int mnFirstKFid;
long int mnFirstFrame;
// Number of observations.
int nObs;

// Variables used by the tracking
float mTrackProjX;
float mTrackProjY;
float mTrackDepth;
float mTrackDepthR;
float mTrackProjXR;
float mTrackProjYR;
bool mbTrackInView, mbTrackInViewR;
int mnTrackScaleLevel, mnTrackScaleLevelR;
float mTrackViewCos, mTrackViewCosR;
long unsigned int mnTrackReferenceForFrame;
long unsigned int mnLastFrameSeen;

// Variables used by local mapping
long unsigned int mnBALocalForKF;
long unsigned int mnFuseCandidateForKF;

// Variables used by loop closing
long unsigned int mnLoopPointForKF;
long unsigned int mnCorrectedByKF;
long unsigned int mnCorrectedReference;
cv::Mat mPosGBA;
long unsigned int mnBAGlobalForKF;
long unsigned int mnBALocalForMerge;

// Variable used by merging
cv::Mat mPosMerge;
cv::Mat mNormalVectorMerge;

// For inverse depth optimization
double mInvDepth;
double mInitU;
double mInitV;
KeyFrame* mpHostKF;

static std::mutex mGlobalMutex;

// Id of the map this point was created in.
unsigned int mnOriginMapId;

protected:

// Position in absolute coordinates
cv::Mat mWorldPos;
cv::Matx31f mWorldPosx;

// Keyframes observing the point and associated index in keyframe
std::map<KeyFrame*,std::tuple<int,int> > mObservations;

// Mean viewing direction
cv::Mat mNormalVector;
cv::Matx31f mNormalVectorx;

// Best descriptor to fast matching
cv::Mat mDescriptor;

// Reference KeyFrame
KeyFrame* mpRefKF;

// Tracking counters
int mnVisible;
int mnFound;

// Bad flag (we do not currently erase MapPoint from memory)
bool mbBad;
MapPoint* mpReplaced;

// Scale invariance distances
float mfMinDistance;
float mfMaxDistance;

Map* mpMap;

std::mutex mMutexPos;
std::mutex mMutexFeatures;
std::mutex mMutexMap;
};
} //namespace ORB_SLAM
#endif // MAPPOINT_H

34
include/ORBVocabulary.h Normal file
View File

@@ -0,0 +1,34 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef ORBVOCABULARY_H
#define ORBVOCABULARY_H
#include"Thirdparty/DBoW2/DBoW2/FORB.h"
#include"Thirdparty/DBoW2/DBoW2/TemplatedVocabulary.h"
namespace ORB_SLAM3
{
// Bag-of-words vocabulary over ORB binary descriptors (DBoW2 instantiation).
typedef DBoW2::TemplatedVocabulary<DBoW2::FORB::TDescriptor, DBoW2::FORB>
ORBVocabulary;
} //namespace ORB_SLAM
#endif // ORBVOCABULARY_H

114
include/ORBextractor.h Normal file
View File

@@ -0,0 +1,114 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef ORBEXTRACTOR_H
#define ORBEXTRACTOR_H
#include <vector>
#include <list>
#include <opencv2/opencv.hpp>
namespace ORB_SLAM3
{
// Node of the tree used by ORBextractor::DistributeOctTree to spread
// keypoints over the image.
class ExtractorNode
{
public:
ExtractorNode():bNoMore(false){}

// Split this node's region into the four children n1..n4.
void DivideNode(ExtractorNode &n1, ExtractorNode &n2, ExtractorNode &n3, ExtractorNode &n4);

// Keypoints assigned to this node's region.
std::vector<cv::KeyPoint> vKeys;
// Region corners: Upper/Bottom Left/Right.
cv::Point2i UL, UR, BL, BR;
// Position of this node in the distribution list it lives in.
std::list<ExtractorNode>::iterator lit;
// Set when the node should not be divided further.
bool bNoMore;
};
// Multi-scale ORB detector/descriptor: builds an image pyramid, detects FAST
// corners, distributes them with an octree and computes ORB descriptors.
class ORBextractor
{
public:

enum {HARRIS_SCORE=0, FAST_SCORE=1 };

// nfeatures: target number of keypoints; scaleFactor/nlevels: pyramid shape;
// iniThFAST/minThFAST: initial and fallback FAST thresholds.
ORBextractor(int nfeatures, float scaleFactor, int nlevels,
int iniThFAST, int minThFAST);

~ORBextractor(){}

// Compute the ORB features and descriptors on an image.
// ORB are dispersed on the image using an octree.
// Mask is ignored in the current implementation.
// Return-value semantics are defined in ORBextractor.cc (related to the
// stereo lapping area given by vLappingArea — confirm there).
int operator()( cv::InputArray _image, cv::InputArray _mask,
std::vector<cv::KeyPoint>& _keypoints,
cv::OutputArray _descriptors, std::vector<int> &vLappingArea);

// Pyramid parameter accessors.
int inline GetLevels(){
return nlevels;}

float inline GetScaleFactor(){
return scaleFactor;}

std::vector<float> inline GetScaleFactors(){
return mvScaleFactor;
}

std::vector<float> inline GetInverseScaleFactors(){
return mvInvScaleFactor;
}

std::vector<float> inline GetScaleSigmaSquares(){
return mvLevelSigma2;
}

std::vector<float> inline GetInverseScaleSigmaSquares(){
return mvInvLevelSigma2;
}

// Image pyramid, one image per level.
std::vector<cv::Mat> mvImagePyramid;

protected:

void ComputePyramid(cv::Mat image);
// Keypoint detection per level; the OctTree variant distributes keypoints
// uniformly, the Old variant is the legacy grid-based detection.
void ComputeKeyPointsOctTree(std::vector<std::vector<cv::KeyPoint> >& allKeypoints);
std::vector<cv::KeyPoint> DistributeOctTree(const std::vector<cv::KeyPoint>& vToDistributeKeys, const int &minX,
const int &maxX, const int &minY, const int &maxY, const int &nFeatures, const int &level);

void ComputeKeyPointsOld(std::vector<std::vector<cv::KeyPoint> >& allKeypoints);
// Sampling pattern for the ORB descriptor.
std::vector<cv::Point> pattern;

int nfeatures;
double scaleFactor;
int nlevels;
int iniThFAST;
int minThFAST;

// Number of features to extract at each pyramid level.
std::vector<int> mnFeaturesPerLevel;

// Circular patch bounds used for orientation computation.
std::vector<int> umax;

// Per-level scale factors and their inverses / squared sigmas.
std::vector<float> mvScaleFactor;
std::vector<float> mvInvScaleFactor;
std::vector<float> mvLevelSigma2;
std::vector<float> mvInvLevelSigma2;
};
} //namespace ORB_SLAM
#endif

115
include/ORBmatcher.h Normal file
View File

@@ -0,0 +1,115 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef ORBMATCHER_H
#define ORBMATCHER_H
#include<vector>
#include<opencv2/core/core.hpp>
#include<opencv2/features2d/features2d.hpp>
#include"MapPoint.h"
#include"KeyFrame.h"
#include"Frame.h"
namespace ORB_SLAM3
{
// ORB descriptor matching between frames, keyframes and map points, with an
// optional rotation-consistency check.
class ORBmatcher
{
public:

// nnratio: best/second-best distance ratio threshold; checkOri: enable the
// rotation-histogram consistency check.
ORBmatcher(float nnratio=0.6, bool checkOri=true);

// Computes the Hamming distance between two ORB descriptors
static int DescriptorDistance(const cv::Mat &a, const cv::Mat &b);

// Search matches between Frame keypoints and projected MapPoints. Returns number of matches
// Used to track the local map (Tracking)
int SearchByProjection(Frame &F, const std::vector<MapPoint*> &vpMapPoints, const float th=3, const bool bFarPoints = false, const float thFarPoints = 50.0f);

// Project MapPoints tracked in last frame into the current frame and search matches.
// Used to track from previous frame (Tracking)
int SearchByProjection(Frame &CurrentFrame, const Frame &LastFrame, const float th, const bool bMono);

// Project MapPoints seen in KeyFrame into the Frame and search matches.
// Used in relocalisation (Tracking)
int SearchByProjection(Frame &CurrentFrame, KeyFrame* pKF, const std::set<MapPoint*> &sAlreadyFound, const float th, const int ORBdist);

// Project MapPoints using a Similarity Transformation and search matches.
// Used in loop detection (Loop Closing)
int SearchByProjection(KeyFrame* pKF, cv::Mat Scw, const std::vector<MapPoint*> &vpPoints, std::vector<MapPoint*> &vpMatched, int th, float ratioHamming=1.0);

// Project MapPoints using a Similarity Transformation and search matches.
// Used in Place Recognition (Loop Closing and Merging)
int SearchByProjection(KeyFrame* pKF, cv::Mat Scw, const std::vector<MapPoint*> &vpPoints, const std::vector<KeyFrame*> &vpPointsKFs, std::vector<MapPoint*> &vpMatched, std::vector<KeyFrame*> &vpMatchedKF, int th, float ratioHamming=1.0);

// Search matches between MapPoints in a KeyFrame and ORB in a Frame.
// Brute force constrained to ORB that belong to the same vocabulary node (at a certain level)
// Used in Relocalisation and Loop Detection
int SearchByBoW(KeyFrame *pKF, Frame &F, std::vector<MapPoint*> &vpMapPointMatches);
int SearchByBoW(KeyFrame *pKF1, KeyFrame* pKF2, std::vector<MapPoint*> &vpMatches12);

// Matching for the Map Initialization (only used in the monocular case)
int SearchForInitialization(Frame &F1, Frame &F2, std::vector<cv::Point2f> &vbPrevMatched, std::vector<int> &vnMatches12, int windowSize=10);

// Matching to triangulate new MapPoints. Check Epipolar Constraint.
int SearchForTriangulation(KeyFrame *pKF1, KeyFrame* pKF2, cv::Mat F12,
std::vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, const bool bCoarse = false);

// cv::Matx variant of SearchForTriangulation.
int SearchForTriangulation_(KeyFrame *pKF1, KeyFrame* pKF2, cv::Matx33f F12,
std::vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, const bool bCoarse = false);

// Overload that also reports the matched (triangulated) points.
int SearchForTriangulation(KeyFrame *pKF1, KeyFrame *pKF2, cv::Mat F12,
vector<pair<size_t, size_t> > &vMatchedPairs, const bool bOnlyStereo, vector<cv::Mat> &vMatchedPoints);

// Search matches between MapPoints seen in KF1 and KF2 transforming by a Sim3 [s12*R12|t12]
// In the stereo and RGB-D case, s12=1
int SearchBySim3(KeyFrame* pKF1, KeyFrame* pKF2, std::vector<MapPoint *> &vpMatches12, const float &s12, const cv::Mat &R12, const cv::Mat &t12, const float th);

// Project MapPoints into KeyFrame and search for duplicated MapPoints.
int Fuse(KeyFrame* pKF, const vector<MapPoint *> &vpMapPoints, const float th=3.0, const bool bRight = false);

// Project MapPoints into KeyFrame using a given Sim3 and search for duplicated MapPoints.
int Fuse(KeyFrame* pKF, cv::Mat Scw, const std::vector<MapPoint*> &vpPoints, float th, vector<MapPoint *> &vpReplacePoint);

public:

// Hamming-distance acceptance thresholds (values defined in ORBmatcher.cc).
static const int TH_LOW;
static const int TH_HIGH;
// Number of bins of the rotation-consistency histogram.
static const int HISTO_LENGTH;

protected:

// Epipolar-line distance checks between two keypoints given fundamental matrix F12.
bool CheckDistEpipolarLine(const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Mat &F12, const KeyFrame *pKF, const bool b1=false);
bool CheckDistEpipolarLine2(const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Mat &F12, const KeyFrame *pKF, const float unc);

// Search-window radius as a function of the viewing-direction cosine.
float RadiusByViewingCos(const float &viewCos);

// Find the three largest bins of the rotation histogram.
void ComputeThreeMaxima(std::vector<int>* histo, const int L, int &ind1, int &ind2, int &ind3);

float mfNNratio;
bool mbCheckOrientation;
};
}// namespace ORB_SLAM
#endif // ORBMATCHER_H

219
include/OptimizableTypes.h Normal file
View File

@@ -0,0 +1,219 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef ORB_SLAM3_OPTIMIZABLETYPES_H
#define ORB_SLAM3_OPTIMIZABLETYPES_H
#include "Thirdparty/g2o/g2o/core/base_unary_edge.h"
#include <Thirdparty/g2o/g2o/types/types_six_dof_expmap.h>
#include <Thirdparty/g2o/g2o/types/sim3.h>
#include <Eigen/Geometry>
#include <include/CameraModels/GeometricCamera.h>
namespace ORB_SLAM3 {
// Unary edge: reprojection error of a fixed world point Xw observed by a
// camera whose SE(3) pose is the only optimized vertex.
class EdgeSE3ProjectXYZOnlyPose: public g2o::BaseUnaryEdge<2, Eigen::Vector2d, g2o::VertexSE3Expmap>{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW

    EdgeSE3ProjectXYZOnlyPose(){}

    bool read(std::istream& is);

    bool write(std::ostream& os) const;

    // error = observed pixel - projection of Xw under the current pose estimate.
    void computeError() {
        const g2o::VertexSE3Expmap* vPose = static_cast<const g2o::VertexSE3Expmap*>(_vertices[0]);
        const Eigen::Vector3d Xc = vPose->estimate().map(Xw);
        _error = Eigen::Vector2d(_measurement) - pCamera->project(Xc);
    }

    // True when the point has positive depth in the camera frame.
    bool isDepthPositive() {
        const g2o::VertexSE3Expmap* vPose = static_cast<const g2o::VertexSE3Expmap*>(_vertices[0]);
        return (vPose->estimate().map(Xw))(2) > 0.0;
    }

    virtual void linearizeOplus();

    Eigen::Vector3d Xw;       // fixed 3D point, world coordinates
    GeometricCamera* pCamera; // camera model used for projection
};
// Unary pose-only edge that projects the fixed world point Xw after composing
// the optimized pose with a fixed relative transform mTrl.
class EdgeSE3ProjectXYZOnlyPoseToBody: public g2o::BaseUnaryEdge<2, Eigen::Vector2d, g2o::VertexSE3Expmap>{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW

    EdgeSE3ProjectXYZOnlyPoseToBody(){}

    bool read(std::istream& is);

    bool write(std::ostream& os) const;

    // error = observed pixel - projection of Xw through the pose mTrl * T.
    void computeError() {
        const g2o::VertexSE3Expmap* vPose = static_cast<const g2o::VertexSE3Expmap*>(_vertices[0]);
        const g2o::SE3Quat Trw = mTrl * vPose->estimate();
        _error = Eigen::Vector2d(_measurement) - pCamera->project(Trw.map(Xw));
    }

    // True when Xw has positive depth in the composed camera frame.
    bool isDepthPositive() {
        const g2o::VertexSE3Expmap* vPose = static_cast<const g2o::VertexSE3Expmap*>(_vertices[0]);
        const g2o::SE3Quat Trw = mTrl * vPose->estimate();
        return (Trw.map(Xw))(2) > 0.0;
    }

    virtual void linearizeOplus();

    Eigen::Vector3d Xw;       // fixed 3D point, world coordinates
    GeometricCamera* pCamera; // camera model used for projection
    g2o::SE3Quat mTrl;        // fixed transform composed with the optimized pose
};
// Binary edge: reprojection error between an optimized 3D point (vertex 0)
// and an optimized SE(3) camera pose (vertex 1).
class EdgeSE3ProjectXYZ: public g2o::BaseBinaryEdge<2, Eigen::Vector2d, g2o::VertexSBAPointXYZ, g2o::VertexSE3Expmap>{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW

    EdgeSE3ProjectXYZ();

    bool read(std::istream& is);

    bool write(std::ostream& os) const;

    // error = observed pixel - projection of the point under the current pose.
    void computeError() {
        const g2o::VertexSE3Expmap* vPose = static_cast<const g2o::VertexSE3Expmap*>(_vertices[1]);
        const g2o::VertexSBAPointXYZ* vPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);
        const Eigen::Vector3d Xc = vPose->estimate().map(vPoint->estimate());
        _error = Eigen::Vector2d(_measurement) - pCamera->project(Xc);
    }

    // True when the point has positive depth in the camera frame.
    bool isDepthPositive() {
        const g2o::VertexSE3Expmap* vPose = static_cast<const g2o::VertexSE3Expmap*>(_vertices[1]);
        const g2o::VertexSBAPointXYZ* vPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);
        return (vPose->estimate().map(vPoint->estimate()))(2) > 0.0;
    }

    virtual void linearizeOplus();

    GeometricCamera* pCamera; // camera model used for projection
};
// Binary edge that projects an optimized 3D point (vertex 0) after composing
// the optimized SE(3) pose (vertex 1) with a fixed relative transform mTrl.
class EdgeSE3ProjectXYZToBody: public g2o::BaseBinaryEdge<2, Eigen::Vector2d, g2o::VertexSBAPointXYZ, g2o::VertexSE3Expmap>{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW

    EdgeSE3ProjectXYZToBody();

    bool read(std::istream& is);

    bool write(std::ostream& os) const;

    // error = observed pixel - projection of the point through mTrl * T.
    void computeError() {
        const g2o::VertexSE3Expmap* vPose = static_cast<const g2o::VertexSE3Expmap*>(_vertices[1]);
        const g2o::VertexSBAPointXYZ* vPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);
        const g2o::SE3Quat Trw = mTrl * vPose->estimate();
        _error = Eigen::Vector2d(_measurement) - pCamera->project(Trw.map(vPoint->estimate()));
    }

    // True when the point has positive depth in the composed camera frame.
    bool isDepthPositive() {
        const g2o::VertexSE3Expmap* vPose = static_cast<const g2o::VertexSE3Expmap*>(_vertices[1]);
        const g2o::VertexSBAPointXYZ* vPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);
        const g2o::SE3Quat Trw = mTrl * vPose->estimate();
        return (Trw.map(vPoint->estimate()))(2) > 0.0;
    }

    virtual void linearizeOplus();

    GeometricCamera* pCamera; // camera model used for projection
    g2o::SE3Quat mTrl;        // fixed transform composed with the optimized pose
};
// 7-DoF Sim(3) vertex (rotation, translation, scale) with camera models of
// the two frames attached; the scale component can be frozen via _fix_scale.
class VertexSim3Expmap : public g2o::BaseVertex<7, g2o::Sim3>
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
VertexSim3Expmap();
virtual bool read(std::istream& is);
virtual bool write(std::ostream& os) const;

// Reset the estimate to the identity Sim3.
virtual void setToOriginImpl() {
_estimate = g2o::Sim3();
}

// Left-multiply the current estimate by the Sim3 built from the 7-vector
// update; component 6 is the scale term.
virtual void oplusImpl(const double* update_)
{
// NOTE(review): the const_cast is deliberate — when _fix_scale is set, the
// scale component of the caller-supplied update buffer is zeroed IN PLACE
// before the increment is applied.
Eigen::Map<g2o::Vector7d> update(const_cast<double*>(update_));

if (_fix_scale)
update[6] = 0;

g2o::Sim3 s(update);
setEstimate(s*estimate());
}

// Camera models of the two sides of the Sim3 (used by the projection edges).
GeometricCamera* pCamera1, *pCamera2;

// When true, the scale part of every update is forced to zero.
bool _fix_scale;
};
// Binary edge: projects a 3D point (vertex 0) into camera 1 of the Sim3
// vertex (vertex 1) and compares against the measured pixel.
class EdgeSim3ProjectXYZ : public g2o::BaseBinaryEdge<2, Eigen::Vector2d, g2o::VertexSBAPointXYZ, ORB_SLAM3::VertexSim3Expmap>
{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW

    EdgeSim3ProjectXYZ();

    virtual bool read(std::istream& is);

    virtual bool write(std::ostream& os) const;

    // error = observed pixel - projection of the Sim3-mapped point into camera 1.
    void computeError()
    {
        const ORB_SLAM3::VertexSim3Expmap* vSim3 = static_cast<const ORB_SLAM3::VertexSim3Expmap*>(_vertices[1]);
        const g2o::VertexSBAPointXYZ* vPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);

        const Eigen::Vector3d mapped = vSim3->estimate().map(vPoint->estimate());
        _error = Eigen::Vector2d(_measurement) - vSim3->pCamera1->project(mapped);
    }

    // virtual void linearizeOplus();
};
// Binary edge: counterpart of EdgeSim3ProjectXYZ — projects the point into
// camera 2 using the INVERSE of the Sim3 vertex estimate.
class EdgeInverseSim3ProjectXYZ : public g2o::BaseBinaryEdge<2, Eigen::Vector2d, g2o::VertexSBAPointXYZ, VertexSim3Expmap>
{
public:
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW

    EdgeInverseSim3ProjectXYZ();

    virtual bool read(std::istream& is);

    virtual bool write(std::ostream& os) const;

    // error = observed pixel - projection of the inverse-Sim3-mapped point into camera 2.
    void computeError()
    {
        const ORB_SLAM3::VertexSim3Expmap* vSim3 = static_cast<const ORB_SLAM3::VertexSim3Expmap*>(_vertices[1]);
        const g2o::VertexSBAPointXYZ* vPoint = static_cast<const g2o::VertexSBAPointXYZ*>(_vertices[0]);

        const Eigen::Vector3d mapped = vSim3->estimate().inverse().map(vPoint->estimate());
        _error = Eigen::Vector2d(_measurement) - vSim3->pCamera2->project(mapped);
    }

    // virtual void linearizeOplus();
};
}
#endif //ORB_SLAM3_OPTIMIZABLETYPES_H

123
include/Optimizer.h Normal file
View File

@@ -0,0 +1,123 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef OPTIMIZER_H
#define OPTIMIZER_H
#include "Map.h"
#include "MapPoint.h"
#include "KeyFrame.h"
#include "LoopClosing.h"
#include "Frame.h"
#include <math.h>
#include "Thirdparty/g2o/g2o/types/types_seven_dof_expmap.h"
#include "Thirdparty/g2o/g2o/core/sparse_block_matrix.h"
#include "Thirdparty/g2o/g2o/core/block_solver.h"
#include "Thirdparty/g2o/g2o/core/optimization_algorithm_levenberg.h"
#include "Thirdparty/g2o/g2o/core/optimization_algorithm_gauss_newton.h"
#include "Thirdparty/g2o/g2o/solvers/linear_solver_eigen.h"
#include "Thirdparty/g2o/g2o/types/types_six_dof_expmap.h"
#include "Thirdparty/g2o/g2o/core/robust_kernel_impl.h"
#include "Thirdparty/g2o/g2o/solvers/linear_solver_dense.h"
namespace ORB_SLAM3
{
class LoopClosing;
// Collection of static g2o-based optimization routines: bundle adjustment
// (global/local/inertial), pose-only optimization, essential-graph
// optimization and Sim3 refinement.
class Optimizer
{
public:

void static BundleAdjustment(const std::vector<KeyFrame*> &vpKF, const std::vector<MapPoint*> &vpMP,
int nIterations = 5, bool *pbStopFlag=NULL, const unsigned long nLoopKF=0,
const bool bRobust = true);
// NOTE(review): "Adjustemnt" is a misspelling of "Adjustment"; the name is
// kept because other translation units call it.
void static GlobalBundleAdjustemnt(Map* pMap, int nIterations=5, bool *pbStopFlag=NULL,
const unsigned long nLoopKF=0, const bool bRobust = true);
// Full visual-inertial BA; priorG/priorA weight the gyro/accelerometer bias priors.
void static FullInertialBA(Map *pMap, int its, const bool bFixLocal=false, const unsigned long nLoopKF=0, bool *pbStopFlag=NULL, bool bInit=false, float priorG = 1e2, float priorA=1e6, Eigen::VectorXd *vSingVal = NULL, bool *bHess=NULL);

void static LocalBundleAdjustment(KeyFrame* pKF, bool *pbStopFlag, vector<KeyFrame*> &vpNonEnoughOptKFs);
void static LocalBundleAdjustment(KeyFrame* pKF, bool *pbStopFlag, Map *pMap, int& num_fixedKF, int& num_OptKF, int& num_MPs, int& num_edges);

void static MergeBundleAdjustmentVisual(KeyFrame* pCurrentKF, vector<KeyFrame*> vpWeldingKFs, vector<KeyFrame*> vpFixedKFs, bool *pbStopFlag);

// Pose-only optimization of a single frame; returns the number of inliers.
int static PoseOptimization(Frame* pFrame);
int static PoseInertialOptimizationLastKeyFrame(Frame* pFrame, bool bRecInit = false);
int static PoseInertialOptimizationLastFrame(Frame *pFrame, bool bRecInit = false);

// if bFixScale is true, 6DoF optimization (stereo,rgbd), 7DoF otherwise (mono)
void static OptimizeEssentialGraph(Map* pMap, KeyFrame* pLoopKF, KeyFrame* pCurKF,
const LoopClosing::KeyFrameAndPose &NonCorrectedSim3,
const LoopClosing::KeyFrameAndPose &CorrectedSim3,
const map<KeyFrame *, set<KeyFrame *> > &LoopConnections,
const bool &bFixScale);
void static OptimizeEssentialGraph6DoF(KeyFrame* pCurKF, vector<KeyFrame*> &vpFixedKFs, vector<KeyFrame*> &vpFixedCorrectedKFs,
vector<KeyFrame*> &vpNonFixedKFs, vector<MapPoint*> &vpNonCorrectedMPs, double scale);
void static OptimizeEssentialGraph(KeyFrame* pCurKF, vector<KeyFrame*> &vpFixedKFs, vector<KeyFrame*> &vpFixedCorrectedKFs,
vector<KeyFrame*> &vpNonFixedKFs, vector<MapPoint*> &vpNonCorrectedMPs);
void static OptimizeEssentialGraph(KeyFrame* pCurKF,
const LoopClosing::KeyFrameAndPose &NonCorrectedSim3,
const LoopClosing::KeyFrameAndPose &CorrectedSim3);
// For inertial loop closing
void static OptimizeEssentialGraph4DoF(Map* pMap, KeyFrame* pLoopKF, KeyFrame* pCurKF,
const LoopClosing::KeyFrameAndPose &NonCorrectedSim3,
const LoopClosing::KeyFrameAndPose &CorrectedSim3,
const map<KeyFrame *, set<KeyFrame *> > &LoopConnections);

// if bFixScale is true, optimize SE3 (stereo,rgbd), Sim3 otherwise (mono) (OLD)
static int OptimizeSim3(KeyFrame* pKF1, KeyFrame* pKF2, std::vector<MapPoint *> &vpMatches1,
g2o::Sim3 &g2oS12, const float th2, const bool bFixScale);
// if bFixScale is true, optimize SE3 (stereo,rgbd), Sim3 otherwise (mono) (NEW)
static int OptimizeSim3(KeyFrame* pKF1, KeyFrame* pKF2, std::vector<MapPoint *> &vpMatches1,
g2o::Sim3 &g2oS12, const float th2, const bool bFixScale,
Eigen::Matrix<double,7,7> &mAcumHessian, const bool bAllPoints=false);
static int OptimizeSim3(KeyFrame *pKF1, KeyFrame *pKF2, vector<MapPoint *> &vpMatches1, vector<KeyFrame*> &vpMatches1KF,
g2o::Sim3 &g2oS12, const float th2, const bool bFixScale, Eigen::Matrix<double,7,7> &mAcumHessian,
const bool bAllPoints = false);

// For inertial systems

void static LocalInertialBA(KeyFrame* pKF, bool *pbStopFlag, Map *pMap, int& num_fixedKF, int& num_OptKF, int& num_MPs, int& num_edges, bool bLarge = false, bool bRecInit = false);

void static MergeInertialBA(KeyFrame* pCurrKF, KeyFrame* pMergeKF, bool *pbStopFlag, Map *pMap, LoopClosing::KeyFrameAndPose &corrPoses);

// Local BA in welding area when two maps are merged
void static LocalBundleAdjustment(KeyFrame* pMainKF,vector<KeyFrame*> vpAdjustKF, vector<KeyFrame*> vpFixedKF, bool *pbStopFlag);

// Marginalize block element (start:end,start:end). Perform Schur complement.
// Marginalized elements are filled with zeros.
static Eigen::MatrixXd Marginalize(const Eigen::MatrixXd &H, const int &start, const int &end);
// Condition block element (start:end,start:end). Fill with zeros.
static Eigen::MatrixXd Condition(const Eigen::MatrixXd &H, const int &start, const int &end);
// Remove link between element 1 and 2. Given elements 1,2 and 3 must define the whole matrix.
static Eigen::MatrixXd Sparsify(const Eigen::MatrixXd &H, const int &start1, const int &end1, const int &start2, const int &end2);

// Inertial pose-graph: estimate gravity direction Rwg, scale and IMU biases.
void static InertialOptimization(Map *pMap, Eigen::Matrix3d &Rwg, double &scale, Eigen::Vector3d &bg, Eigen::Vector3d &ba, bool bMono, Eigen::MatrixXd &covInertial, bool bFixedVel=false, bool bGauss=false, float priorG = 1e2, float priorA = 1e6);
void static InertialOptimization(Map *pMap, Eigen::Vector3d &bg, Eigen::Vector3d &ba, float priorG = 1e2, float priorA = 1e6);
void static InertialOptimization(vector<KeyFrame*> vpKFs, Eigen::Vector3d &bg, Eigen::Vector3d &ba, float priorG = 1e2, float priorA = 1e6);
void static InertialOptimization(Map *pMap, Eigen::Matrix3d &Rwg, double &scale);
};
} //namespace ORB_SLAM3
#endif // OPTIMIZER_H

195
include/PnPsolver.h Normal file
View File

@@ -0,0 +1,195 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
/**
* Copyright (c) 2009, V. Lepetit, EPFL
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those
* of the authors and should not be interpreted as representing official policies,
* either expressed or implied, of the FreeBSD Project
*/
#ifndef PNPSOLVER_H
#define PNPSOLVER_H
#include <opencv2/core/core.hpp>
#include "MapPoint.h"
#include "Frame.h"
namespace ORB_SLAM3
{
// Perspective-n-Point solver: estimates the camera pose Tcw of a Frame from
// 3D MapPoint / 2D keypoint correspondences using EPnP inside a RANSAC loop.
// The private "original EPnP code" section is V. Lepetit's reference
// implementation (see the BSD license header above) and keeps its legacy
// OpenCV C API (CvMat) and naming style.
class PnPsolver {
 public:
  // Builds the correspondence set from the frame's keypoints and the matched
  // map points (vpMapPointMatches[i] pairs with keypoint i of F).
  PnPsolver(const Frame &F, const vector<MapPoint*> &vpMapPointMatches);

  ~PnPsolver();

  // Configures the RANSAC loop.
  //   probability   - target confidence of drawing one all-inlier minimal set
  //   minInliers    - minimum inlier count to accept a hypothesis
  //   maxIterations - hard cap on RANSAC iterations
  //   minSet        - minimal sample size drawn per iteration
  //   epsilon       - expected inlier ratio (adapts the iteration count)
  //   th2           - base squared-error threshold, scaled per scale level
  //                   into mvMaxError (see its comment below)
  void SetRansacParameters(double probability = 0.99, int minInliers = 8 , int maxIterations = 300, int minSet = 4, float epsilon = 0.4,
                           float th2 = 5.991);

  // Runs RANSAC up to the configured budget; returns the pose Tcw (empty on
  // failure) and fills the inlier mask and count.
  cv::Mat find(vector<bool> &vbInliers, int &nInliers);

  // Runs at most nIterations additional RANSAC iterations. bNoMore is set
  // when the global iteration budget is exhausted; returns the pose or empty.
  cv::Mat iterate(int nIterations, bool &bNoMore, vector<bool> &vbInliers, int &nInliers);

 private:
  // Flags correspondences whose squared reprojection error is below the
  // per-level threshold in mvMaxError.
  void CheckInliers();
  // Re-estimates the pose from the current inlier set; true on success.
  bool Refine();

  // Functions from the original EPnP code
  void set_maximum_number_of_correspondences(const int n);
  void reset_correspondences(void);
  void add_correspondence(const double X, const double Y, const double Z,
              const double u, const double v);

  // Core EPnP solve: outputs R and T, returns the reprojection error.
  double compute_pose(double R[3][3], double T[3]);

  void relative_error(double & rot_err, double & transl_err,
              const double Rtrue[3][3], const double ttrue[3],
              const double Rest[3][3],  const double test[3]);

  void print_pose(const double R[3][3], const double t[3]);
  double reprojection_error(const double R[3][3], const double t[3]);

  // Selects the 4 EPnP control points from the input 3D point cloud.
  void choose_control_points(void);
  // Expresses each 3D point in barycentric coordinates of the control points.
  void compute_barycentric_coordinates(void);
  void fill_M(CvMat * M, const int row, const double * alphas, const double u, const double v);
  void compute_ccs(const double * betas, const double * ut);
  void compute_pcs(void);

  void solve_for_sign(void);

  // Three closed-form approximations for the betas of the linearized system.
  void find_betas_approx_1(const CvMat * L_6x10, const CvMat * Rho, double * betas);
  void find_betas_approx_2(const CvMat * L_6x10, const CvMat * Rho, double * betas);
  void find_betas_approx_3(const CvMat * L_6x10, const CvMat * Rho, double * betas);
  void qr_solve(CvMat * A, CvMat * b, CvMat * X);

  double dot(const double * v1, const double * v2);
  double dist2(const double * p1, const double * p2);

  void compute_rho(double * rho);
  void compute_L_6x10(const double * ut, double * l_6x10);

  // Gauss-Newton refinement of the betas.
  void gauss_newton(const CvMat * L_6x10, const CvMat * Rho, double current_betas[4]);
  void compute_A_and_b_gauss_newton(const double * l_6x10, const double * rho,
                    double cb[4], CvMat * A, CvMat * b);

  double compute_R_and_t(const double * ut, const double * betas,
             double R[3][3], double t[3]);

  void estimate_R_and_t(double R[3][3], double t[3]);

  // NOTE(review): the parameter names look swapped — the const *_dst arrays
  // read as the copy source and the mutable *_src arrays as the destination.
  // Confirm against the definition before relying on the names.
  void copy_R_and_t(const double R_dst[3][3], const double t_dst[3],
            double R_src[3][3], double t_src[3]);

  void mat_to_quat(const double R[3][3], double q[4]);

  // Camera intrinsics: principal point (uc, vc) and focal lengths (fu, fv).
  double uc, vc, fu, fv;

  // EPnP work buffers (raw arrays owned by this class): world points,
  // image points, barycentric coordinates, camera-frame points.
  double * pws, * us, * alphas, * pcs;
  int maximum_number_of_correspondences;
  int number_of_correspondences;

  // Control points in world (cws) and camera (ccs) coordinates.
  double cws[4][3], ccs[4][3];
  double cws_determinant;

  vector<MapPoint*> mvpMapPointMatches;

  // 2D Points
  vector<cv::Point2f> mvP2D;
  // Per-keypoint sigma^2 used to scale the error threshold (see mvMaxError).
  vector<float> mvSigma2;

  // 3D Points
  vector<cv::Point3f> mvP3Dw;

  // Index in Frame
  vector<size_t> mvKeyPointIndices;

  // Current Estimation
  double mRi[3][3];
  double mti[3];
  cv::Mat mTcwi;
  vector<bool> mvbInliersi;
  int mnInliersi;

  // Current Ransac State
  int mnIterations;
  vector<bool> mvbBestInliers;
  int mnBestInliers;
  cv::Mat mBestTcw;

  // Refined
  cv::Mat mRefinedTcw;
  vector<bool> mvbRefinedInliers;
  int mnRefinedInliers;

  // Number of Correspondences
  int N;

  // Indices for random selection [0 .. N-1]
  vector<size_t> mvAllIndices;

  // RANSAC probability
  double mRansacProb;

  // RANSAC min inliers
  int mRansacMinInliers;

  // RANSAC max iterations
  int mRansacMaxIts;

  // RANSAC expected inliers/total ratio
  float mRansacEpsilon;

  // RANSAC Threshold inlier/outlier. Max error e = dist(P1,T_12*P2)^2
  float mRansacTh;

  // RANSAC Minimun Set used at each iteration
  int mRansacMinSet;

  // Max square error associated with scale level. Max error = th*th*sigma(level)*sigma(level)
  vector<float> mvMaxError;

};
} //namespace ORB_SLAM
#endif //PNPSOLVER_H

134
include/Sim3Solver.h Normal file
View File

@@ -0,0 +1,134 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef SIM3SOLVER_H
#define SIM3SOLVER_H
#include <opencv2/opencv.hpp>
#include <vector>
#include "KeyFrame.h"
namespace ORB_SLAM3
{
// RANSAC solver for the Sim(3) transformation (rotation R, translation t,
// scale s) that aligns the map points seen by two KeyFrames. Scale is fixed
// to 1 in the stereo/RGB-D case (see mbFixScale).
class Sim3Solver
{
public:

    // vpMatched12[i] is the MapPoint of pKF2 matched against MapPoint i of
    // pKF1. bFixScale selects similarity (false) vs rigid (true) estimation.
    Sim3Solver(KeyFrame* pKF1, KeyFrame* pKF2, const std::vector<MapPoint*> &vpMatched12, const bool bFixScale = true,
               const vector<KeyFrame*> vpKeyFrameMatchedMP = vector<KeyFrame*>());

    // Configures the RANSAC loop (confidence, minimum inliers, iteration cap).
    void SetRansacParameters(double probability = 0.99, int minInliers = 6 , int maxIterations = 300);

    // Runs RANSAC to exhaustion; returns the best T12 (empty on failure).
    cv::Mat find(std::vector<bool> &vbInliers12, int &nInliers);

    // Runs at most nIterations additional iterations; bNoMore is set when the
    // budget is exhausted. The overload also reports convergence.
    cv::Mat iterate(int nIterations, bool &bNoMore, std::vector<bool> &vbInliers, int &nInliers);
    cv::Mat iterate(int nIterations, bool &bNoMore, vector<bool> &vbInliers, int &nInliers, bool &bConverge);

    // Accessors for the best hypothesis found so far.
    cv::Mat GetEstimatedRotation();
    cv::Mat GetEstimatedTranslation();
    float GetEstimatedScale();

protected:

    // Computes the centroid C of point matrix P and the centered points Pr.
    void ComputeCentroid(cv::Mat &P, cv::Mat &Pr, cv::Mat &C);

    // Closed-form Sim(3) estimation from two 3-point sets (fills mT12i/mT21i).
    void ComputeSim3(cv::Mat &P1, cv::Mat &P2);

    // Counts matches whose mutual reprojection error is within threshold.
    void CheckInliers();

    // Projects world points into image coordinates through Tcw and pCamera.
    void Project(const std::vector<cv::Mat> &vP3Dw, std::vector<cv::Mat> &vP2D, cv::Mat Tcw, GeometricCamera* pCamera);
    void FromCameraToImage(const std::vector<cv::Mat> &vP3Dc, std::vector<cv::Mat> &vP2D, GeometricCamera* pCamera);

protected:

    // KeyFrames and matches
    KeyFrame* mpKF1;
    KeyFrame* mpKF2;

    // Matched 3D points in each KeyFrame's camera frame, and their origins.
    std::vector<cv::Mat> mvX3Dc1;
    std::vector<cv::Mat> mvX3Dc2;
    std::vector<MapPoint*> mvpMapPoints1;
    std::vector<MapPoint*> mvpMapPoints2;
    std::vector<MapPoint*> mvpMatches12;
    std::vector<size_t> mvnIndices1;
    // NOTE(review): sigma^2 values and max-error thresholds stored as size_t
    // — the analogous PnPsolver members are vector<float>, so fractional
    // values would truncate here. Verify against the .cpp before changing.
    std::vector<size_t> mvSigmaSquare1;
    std::vector<size_t> mvSigmaSquare2;
    std::vector<size_t> mvnMaxError1;
    std::vector<size_t> mvnMaxError2;

    // N: number of usable correspondences; mN1: size of the input match list.
    int N;
    int mN1;

    // Current Estimation
    cv::Mat mR12i;
    cv::Mat mt12i;
    float ms12i;
    cv::Mat mT12i;
    cv::Mat mT21i;
    std::vector<bool> mvbInliersi;
    int mnInliersi;

    // Current Ransac State
    int mnIterations;
    std::vector<bool> mvbBestInliers;
    int mnBestInliers;
    cv::Mat mBestT12;
    cv::Mat mBestRotation;
    cv::Mat mBestTranslation;
    float mBestScale;

    // Scale is fixed to 1 in the stereo/RGBD case
    bool mbFixScale;

    // Indices for random selection
    std::vector<size_t> mvAllIndices;

    // Projections
    std::vector<cv::Mat> mvP1im1;
    std::vector<cv::Mat> mvP2im2;

    // RANSAC probability
    double mRansacProb;

    // RANSAC min inliers
    int mRansacMinInliers;

    // RANSAC max iterations
    int mRansacMaxIts;

    // Threshold inlier/outlier. e = dist(Pi,T_ij*Pj)^2 < 5.991*mSigma2
    float mTh;
    float mSigma2;

    // Calibration
    cv::Mat mK1;
    cv::Mat mK2;

    GeometricCamera* pCamera1, *pCamera2;

};
} //namespace ORB_SLAM
#endif // SIM3SOLVER_H

243
include/System.h Normal file
View File

@@ -0,0 +1,243 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef SYSTEM_H
#define SYSTEM_H
#include <io.h>
#include "unistd.h"
#include<stdio.h>
#include<stdlib.h>
#include<string>
#include<thread>
#include<opencv2/core/core.hpp>
#include "Tracking.h"
#include "FrameDrawer.h"
#include "MapDrawer.h"
#include "Atlas.h"
#include "LocalMapping.h"
#include "LoopClosing.h"
#include "KeyFrameDatabase.h"
#include "ORBVocabulary.h"
#include "Viewer.h"
#include "ImuTypes.h"
#include "Config.h"
void usleep(__int64 usec);
namespace ORB_SLAM3
{
// Minimal global verbosity gate for console diagnostics: messages are only
// printed when their level is at or below the process-wide threshold `th`.
class Verbose
{
public:
    // Ascending verbosity levels; QUIET suppresses everything below it.
    enum eLevel
    {
        VERBOSITY_QUIET=0,
        VERBOSITY_NORMAL=1,
        VERBOSITY_VERBOSE=2,
        VERBOSITY_VERY_VERBOSE=3,
        VERBOSITY_DEBUG=4
    };

    // Current process-wide threshold (defined in a .cpp elsewhere).
    static eLevel th;

public:
    // Prints str followed by a newline iff lev does not exceed the threshold.
    static void PrintMess(std::string str, eLevel lev)
    {
        if(lev > th)
            return;
        cout << str << endl;
    }

    // Replaces the global verbosity threshold.
    static void SetTh(eLevel _th)
    {
        th = _th;
    }
};
class Viewer;
class FrameDrawer;
class Atlas;
class Tracking;
class LocalMapping;
class LoopClosing;
// Top-level SLAM facade: owns the Atlas, the Tracking/LocalMapping/LoopClosing
// pipeline and the optional viewer, and exposes the per-frame Track* entry
// points. Tracking runs in the caller's thread; the other components run in
// threads launched by the constructor.
class System
{
public:
    // Input sensor
    enum eSensor{
        MONOCULAR=0,
        STEREO=1,
        RGBD=2,
        IMU_MONOCULAR=3,
        IMU_STEREO=4
    };

    // File type
    enum eFileType{
        TEXT_FILE=0,
        BINARY_FILE=1,
    };

public:

    // Returns the current frame/KF id counter. NOTE(review): exact semantics
    // not visible from this header — confirm against the definition.
    int GetCurID();

    // Initialize the SLAM system. It launches the Local Mapping, Loop Closing and Viewer threads.
    System(const string &strVocFile, const string &strSettingsFile, const eSensor sensor, const bool bUseViewer = true, const int initFr = 0, const string &strSequence = std::string(), const string &strLoadingFile = std::string());

    // Proccess the given stereo frame. Images must be synchronized and rectified.
    // Input images: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackStereo(const cv::Mat &imLeft, const cv::Mat &imRight, const double &timestamp, const vector<IMU::Point>& vImuMeas = vector<IMU::Point>(), string filename="");

    // Process the given rgbd frame. Depthmap must be registered to the RGB frame.
    // Input image: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Input depthmap: Float (CV_32F).
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackRGBD(const cv::Mat &im, const cv::Mat &depthmap, const double &timestamp, string filename="");

    // Proccess the given monocular frame and optionally imu data
    // Input images: RGB (CV_8UC3) or grayscale (CV_8U). RGB is converted to grayscale.
    // Returns the camera pose (empty if tracking fails).
    cv::Mat TrackMonocular(const cv::Mat &im, const double &timestamp, const vector<IMU::Point>& vImuMeas = vector<IMU::Point>(), string filename="");

    // This stops local mapping thread (map building) and performs only camera tracking.
    void ActivateLocalizationMode();
    // This resumes local mapping thread and performs SLAM again.
    void DeactivateLocalizationMode();

    // Returns true if there have been a big map change (loop closure, global BA)
    // since last call to this function
    bool MapChanged();

    // Reset the system (clear Atlas or the active map)
    void Reset();
    void ResetActiveMap();

    // All threads will be requested to finish.
    // It waits until all threads have finished.
    // This function must be called before saving the trajectory.
    void Shutdown();

    // Save camera trajectory in the TUM RGB-D dataset format.
    // Only for stereo and RGB-D. This method does not work for monocular.
    // Call first Shutdown()
    // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
    void SaveTrajectoryTUM(const string &filename);

    // Save keyframe poses in the TUM RGB-D dataset format.
    // This method works for all sensor input.
    // Call first Shutdown()
    // See format details at: http://vision.in.tum.de/data/datasets/rgbd-dataset
    void SaveKeyFrameTrajectoryTUM(const string &filename);

    // EuRoC-format counterparts of the two savers above.
    void SaveTrajectoryEuRoC(const string &filename);
    void SaveKeyFrameTrajectoryEuRoC(const string &filename);

    // Save camera trajectory in the KITTI dataset format.
    // Only for stereo and RGB-D. This method does not work for monocular.
    // Call first Shutdown()
    // See format details at: http://www.cvlibs.net/datasets/kitti/eval_odometry.php
    void SaveTrajectoryKITTI(const string &filename);

    // TODO: Save/Load functions
    // SaveMap(const string &filename);
    // LoadMap(const string &filename);

    // Information from most recent processed frame
    // You can call this right after TrackMonocular (or stereo or RGBD)
    int GetTrackingState();
    std::vector<MapPoint*> GetTrackedMapPoints();
    std::vector<cv::KeyPoint> GetTrackedKeyPointsUn();

    // For debugging
    double GetTimeFromIMUInit();
    bool isLost();
    bool isFinished();

    void ChangeDataset();

#ifdef REGISTER_TIMES
    void InsertRectTime(double& time);
    void InsertTrackTime(double& time);
#endif

private:

    // Input sensor
    eSensor mSensor;

    // ORB vocabulary used for place recognition and feature matching.
    ORBVocabulary* mpVocabulary;

    // KeyFrame database for place recognition (relocalization and loop detection).
    KeyFrameDatabase* mpKeyFrameDatabase;

    // Atlas structure that stores the pointers to all KeyFrames and MapPoints.
    Atlas* mpAtlas;

    // Tracker. It receives a frame and computes the associated camera pose.
    // It also decides when to insert a new keyframe, create some new MapPoints and
    // performs relocalization if tracking fails.
    Tracking* mpTracker;

    // Local Mapper. It manages the local map and performs local bundle adjustment.
    LocalMapping* mpLocalMapper;

    // Loop Closer. It searches loops with every new keyframe. If there is a loop it performs
    // a pose graph optimization and full bundle adjustment (in a new thread) afterwards.
    LoopClosing* mpLoopCloser;

    // The viewer draws the map and the current camera pose. It uses Pangolin.
    Viewer* mpViewer;

    FrameDrawer* mpFrameDrawer;
    MapDrawer* mpMapDrawer;

    // System threads: Local Mapping, Loop Closing, Viewer.
    // The Tracking thread "lives" in the main execution thread that creates the System object.
    std::thread* mptLocalMapping;
    std::thread* mptLoopClosing;
    std::thread* mptViewer;

    // Reset flag
    std::mutex mMutexReset;
    bool mbReset;
    bool mbResetActiveMap;

    // Change mode flags
    std::mutex mMutexMode;
    bool mbActivateLocalizationMode;
    bool mbDeactivateLocalizationMode;

    // Tracking state (guarded by mMutexState, snapshot of the last frame)
    int mTrackingState;
    std::vector<MapPoint*> mTrackedMapPoints;
    std::vector<cv::KeyPoint> mTrackedKeyPointsUn;
    std::mutex mMutexState;
};
}// namespace ORB_SLAM
#endif // SYSTEM_H

353
include/Tracking.h Normal file
View File

@@ -0,0 +1,353 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef TRACKING_H
#define TRACKING_H
#include<opencv2/core/core.hpp>
#include<opencv2/features2d/features2d.hpp>
#include <opencv2/video/tracking.hpp>
#include"Viewer.h"
#include"FrameDrawer.h"
#include"Atlas.h"
#include"LocalMapping.h"
#include"LoopClosing.h"
#include"Frame.h"
#include "ORBVocabulary.h"
#include"KeyFrameDatabase.h"
#include"ORBextractor.h"
#include "Initializer.h"
#include "MapDrawer.h"
#include "System.h"
#include "ImuTypes.h"
#include "GeometricCamera.h"
#include <mutex>
#include <unordered_set>
namespace ORB_SLAM3
{
class Viewer;
class FrameDrawer;
class Atlas;
class LocalMapping;
class LoopClosing;
class System;
// Frame-to-map tracker: receives images (and optionally IMU measurements),
// extracts ORB features, estimates the camera pose for every frame, decides
// when to spawn keyframes, and drives map initialization / relocalization.
// Lives in the caller's thread (see System).
class Tracking
{  

public:
    Tracking(System* pSys, ORBVocabulary* pVoc, FrameDrawer* pFrameDrawer, MapDrawer* pMapDrawer, Atlas* pAtlas,
             KeyFrameDatabase* pKFDB, const string &strSettingPath, const int sensor, const string &_nameSeq=std::string());

    ~Tracking();

    // Parse the config file
    bool ParseCamParamFile(cv::FileStorage &fSettings);
    bool ParseORBParamFile(cv::FileStorage &fSettings);
    bool ParseIMUParamFile(cv::FileStorage &fSettings);

    // Preprocess the input and call Track(). Extract features and performs stereo matching.
    cv::Mat GrabImageStereo(const cv::Mat &imRectLeft,const cv::Mat &imRectRight, const double &timestamp, string filename);
    cv::Mat GrabImageRGBD(const cv::Mat &imRGB,const cv::Mat &imD, const double &timestamp, string filename);
    cv::Mat GrabImageMonocular(const cv::Mat &im, const double &timestamp, string filename);
    // cv::Mat GrabImageImuMonocular(const cv::Mat &im, const double &timestamp);

    // Queues one IMU measurement for the next preintegration step.
    void GrabImuData(const IMU::Point &imuMeasurement);

    void SetLocalMapper(LocalMapping* pLocalMapper);
    void SetLoopClosing(LoopClosing* pLoopClosing);
    void SetViewer(Viewer* pViewer);
    void SetStepByStep(bool bSet);

    // Load new settings
    // The focal lenght should be similar or scale prediction will fail when projecting points
    void ChangeCalibration(const string &strSettingPath);

    // Use this function if you have deactivated local mapping and you only want to localize the camera.
    void InformOnlyTracking(const bool &flag);

    // Propagates an updated scale/bias estimate (from IMU init or BA) into the frames.
    void UpdateFrameIMU(const float s, const IMU::Bias &b, KeyFrame* pCurrentKeyFrame);
    KeyFrame* GetLastKeyFrame()
    {
        return mpLastKeyFrame;
    }

    // Starts a fresh map inside the Atlas (multi-map operation).
    void CreateMapInAtlas();
    std::mutex mMutexTracks;

    //--
    void NewDataset();
    int GetNumberDataset();
    int GetMatchesInliers();

public:

    // Tracking states
    enum eTrackingState{
        SYSTEM_NOT_READY=-1,
        NO_IMAGES_YET=0,
        NOT_INITIALIZED=1,
        OK=2,
        RECENTLY_LOST=3,
        LOST=4,
        OK_KLT=5
    };

    eTrackingState mState;
    eTrackingState mLastProcessedState;

    // Input sensor
    int mSensor;

    // Current Frame
    Frame mCurrentFrame;
    Frame mLastFrame;

    cv::Mat mImGray;

    // Initialization Variables (Monocular)
    std::vector<int> mvIniLastMatches;
    std::vector<int> mvIniMatches;
    std::vector<cv::Point2f> mvbPrevMatched;
    std::vector<cv::Point3f> mvIniP3D;
    Frame mInitialFrame;

    // Lists used to recover the full camera trajectory at the end of the execution.
    // Basically we store the reference keyframe for each frame and its relative transformation
    list<cv::Mat> mlRelativeFramePoses;
    list<KeyFrame*> mlpReferences;
    list<double> mlFrameTimes;
    list<bool> mlbLost;

    // frames with estimated pose
    int mTrackedFr;
    bool mbStep;

    // True if local mapping is deactivated and we are performing only localization
    bool mbOnlyTracking;

    void Reset(bool bLocMap = false);
    void ResetActiveMap(bool bLocMap = false);

    float mMeanTrack;
    bool mbInitWith3KFs;
    double t0; // time-stamp of first read frame
    double t0vis; // time-stamp of first inserted keyframe
    double t0IMU; // time-stamp of IMU initialization

    vector<MapPoint*> GetLocalMapMPS();

    bool mbWriteStats;

#ifdef REGISTER_TIMES
    // Per-stage timing logs (only compiled in when profiling is enabled).
    void LocalMapStats2File();
    void TrackStats2File();
    void PrintTimeStats();

    vector<double> vdRectStereo_ms;
    vector<double> vdORBExtract_ms;
    vector<double> vdStereoMatch_ms;
    vector<double> vdIMUInteg_ms;
    vector<double> vdPosePred_ms;
    vector<double> vdLMTrack_ms;
    vector<double> vdNewKF_ms;
    vector<double> vdTrackTotal_ms;

    vector<double> vdUpdatedLM_ms;
    vector<double> vdSearchLP_ms;
    vector<double> vdPoseOpt_ms;
#endif

    vector<int> vnKeyFramesLM;
    vector<int> vnMapPointsLM;

protected:

    // Main tracking function. It is independent of the input sensor.
    void Track();

    // Map initialization for stereo and RGB-D
    void StereoInitialization();

    // Map initialization for monocular
    void MonocularInitialization();
    void CreateNewMapPoints();
    cv::Mat ComputeF12(KeyFrame *&pKF1, KeyFrame *&pKF2);
    void CreateInitialMapMonocular();

    void CheckReplacedInLastFrame();
    bool TrackReferenceKeyFrame();
    void UpdateLastFrame();
    bool TrackWithMotionModel();
    bool PredictStateIMU();

    bool Relocalization();

    void UpdateLocalMap();
    void UpdateLocalPoints();
    void UpdateLocalKeyFrames();

    bool TrackLocalMap();
    bool TrackLocalMap_old();
    void SearchLocalPoints();

    bool NeedNewKeyFrame();
    void CreateNewKeyFrame();

    // Perform preintegration from last frame
    void PreintegrateIMU();

    // Reset IMU biases and compute frame velocity
    void ComputeGyroBias(const vector<Frame*> &vpFs, float &bwx,  float &bwy, float &bwz);
    void ComputeVelocitiesAccBias(const vector<Frame*> &vpFs, float &bax,  float &bay, float &baz);

    bool mbMapUpdated;

    // Imu preintegration from last frame
    IMU::Preintegrated *mpImuPreintegratedFromLastKF;

    // Queue of IMU measurements between frames
    std::list<IMU::Point> mlQueueImuData;

    // Vector of IMU measurements from previous to current frame (to be filled by PreintegrateIMU)
    std::vector<IMU::Point> mvImuFromLastFrame;
    std::mutex mMutexImuQueue;

    // Imu calibration parameters
    IMU::Calib *mpImuCalib;

    // Last Bias Estimation (at keyframe creation)
    IMU::Bias mLastBias;

    // In case of performing only localization, this flag is true when there are no matches to
    // points in the map. Still tracking will continue if there are enough matches with temporal points.
    // In that case we are doing visual odometry. The system will try to do relocalization to recover
    // "zero-drift" localization to the map.
    bool mbVO;

    //Other Thread Pointers
    LocalMapping* mpLocalMapper;
    LoopClosing* mpLoopClosing;

    //ORB
    ORBextractor* mpORBextractorLeft, *mpORBextractorRight;
    ORBextractor* mpIniORBextractor;

    //BoW
    ORBVocabulary* mpORBVocabulary;
    KeyFrameDatabase* mpKeyFrameDB;

    // Initalization (only for monocular)
    Initializer* mpInitializer;
    bool mbSetInit;

    //Local Map
    KeyFrame* mpReferenceKF;
    std::vector<KeyFrame*> mvpLocalKeyFrames;
    std::vector<MapPoint*> mvpLocalMapPoints;

    // System
    System* mpSystem;

    //Drawers
    Viewer* mpViewer;
    FrameDrawer* mpFrameDrawer;
    MapDrawer* mpMapDrawer;
    bool bStepByStep;

    //Atlas
    Atlas* mpAtlas;

    //Calibration matrix
    cv::Mat mK;
    cv::Mat mDistCoef;
    float mbf;

    //New KeyFrame rules (according to fps)
    int mMinFrames;
    int mMaxFrames;

    int mnFirstImuFrameId;
    int mnFramesToResetIMU;

    // Threshold close/far points
    // Points seen as close by the stereo/RGBD sensor are considered reliable
    // and inserted from just one frame. Far points requiere a match in two keyframes.
    float mThDepth;

    // For RGB-D inputs only. For some datasets (e.g. TUM) the depthmap values are scaled.
    float mDepthMapFactor;

    //Current matches in frame
    int mnMatchesInliers;

    //Last Frame, KeyFrame and Relocalisation Info
    KeyFrame* mpLastKeyFrame;
    unsigned int mnLastKeyFrameId;
    unsigned int mnLastRelocFrameId;
    double mTimeStampLost;
    double time_recently_lost;
    double time_recently_lost_visual;

    unsigned int mnFirstFrameId;
    unsigned int mnInitialFrameId;
    unsigned int mnLastInitFrameId;

    bool mbCreatedMap;

    //Motion Model
    cv::Mat mVelocity;

    //Color order (true RGB, false BGR, ignored if grayscale)
    bool mbRGB;

    // Temporal stereo/RGB-D points kept only while tracking in VO mode.
    list<MapPoint*> mlpTemporalPoints;
    //int nMapChangeIndex;

    int mnNumDataset;

    // Stats/timing output streams (opened elsewhere).
    ofstream f_track_stats;

    ofstream f_track_times;
    double mTime_PreIntIMU;
    double mTime_PosePred;
    double mTime_LocalMapTrack;
    double mTime_NewKF_Dec;

    // Camera models (second camera only used in stereo-with-two-models setups).
    GeometricCamera* mpCamera, *mpCamera2;

    int initID, lastID;

    // NOTE(review): presumably the left-to-right extrinsic transform — confirm
    // against the .cpp before documenting further.
    cv::Mat mTlr;

public:
    cv::Mat mImRight;
};
} //namespace ORB_SLAM
#endif // TRACKING_H

View File

@@ -0,0 +1,99 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef TwoViewReconstruction_H
#define TwoViewReconstruction_H
#include<opencv2/opencv.hpp>
#include <unordered_set>
namespace ORB_SLAM3
{
// Two-view structure-from-motion initializer: given keypoint matches between
// a reference and a current frame, computes a homography and a fundamental
// matrix in parallel (RANSAC-scored), selects the better model, and recovers
// the relative motion (R21, t21) plus triangulated 3D points.
class TwoViewReconstruction
{
    // (reference index, current index) keypoint match.
    typedef std::pair<int,int> Match;

public:

    // Fix the reference frame
    TwoViewReconstruction(cv::Mat& k, float sigma = 1.0, int iterations = 200);

    // Computes in parallel a fundamental matrix and a homography
    // Selects a model and tries to recover the motion and the structure from motion
    bool Reconstruct(const std::vector<cv::KeyPoint>& vKeys1, const std::vector<cv::KeyPoint>& vKeys2, const std::vector<int> &vMatches12,
                     cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated);

private:

    // RANSAC model estimation: best H21 / F21 with their support scores.
    void FindHomography(std::vector<bool> &vbMatchesInliers, float &score, cv::Mat &H21);
    void FindFundamental(std::vector<bool> &vbInliers, float &score, cv::Mat &F21);

    // Minimal solvers from normalized point sets.
    cv::Mat ComputeH21(const std::vector<cv::Point2f> &vP1, const std::vector<cv::Point2f> &vP2);
    cv::Mat ComputeF21(const std::vector<cv::Point2f> &vP1, const std::vector<cv::Point2f> &vP2);

    // Symmetric-transfer-error scoring of a hypothesis over all matches.
    float CheckHomography(const cv::Mat &H21, const cv::Mat &H12, std::vector<bool> &vbMatchesInliers, float sigma);
    float CheckFundamental(const cv::Mat &F21, std::vector<bool> &vbMatchesInliers, float sigma);

    // Motion/structure recovery from the chosen model; false if the
    // reconstruction is ambiguous or has too little parallax/support.
    bool ReconstructF(std::vector<bool> &vbMatchesInliers, cv::Mat &F21, cv::Mat &K,
                      cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated, float minParallax, int minTriangulated);

    bool ReconstructH(std::vector<bool> &vbMatchesInliers, cv::Mat &H21, cv::Mat &K,
                      cv::Mat &R21, cv::Mat &t21, std::vector<cv::Point3f> &vP3D, std::vector<bool> &vbTriangulated, float minParallax, int minTriangulated);

    // Linear triangulation of one correspondence from projection matrices P1, P2.
    void Triangulate(const cv::KeyPoint &kp1, const cv::KeyPoint &kp2, const cv::Mat &P1, const cv::Mat &P2, cv::Mat &x3D);

    // Zero-mean, unit-average-deviation normalization; T is the applied transform.
    void Normalize(const std::vector<cv::KeyPoint> &vKeys, std::vector<cv::Point2f> &vNormalizedPoints, cv::Mat &T);

    // Cheirality/parallax check: counts points in front of both cameras for (R, t).
    int CheckRT(const cv::Mat &R, const cv::Mat &t, const std::vector<cv::KeyPoint> &vKeys1, const std::vector<cv::KeyPoint> &vKeys2,
                const std::vector<Match> &vMatches12, std::vector<bool> &vbInliers,
                const cv::Mat &K, std::vector<cv::Point3f> &vP3D, float th2, std::vector<bool> &vbGood, float &parallax);

    // Decomposes an essential matrix into its two rotations and translation.
    void DecomposeE(const cv::Mat &E, cv::Mat &R1, cv::Mat &R2, cv::Mat &t);


    // Keypoints from Reference Frame (Frame 1)
    std::vector<cv::KeyPoint> mvKeys1;

    // Keypoints from Current Frame (Frame 2)
    std::vector<cv::KeyPoint> mvKeys2;

    // Current Matches from Reference to Current
    std::vector<Match> mvMatches12;
    std::vector<bool> mvbMatched1;

    // Calibration
    cv::Mat mK;

    // Standard Deviation and Variance
    float mSigma, mSigma2;

    // Ransac max iterations
    int mMaxIterations;

    // Ransac sets
    std::vector<std::vector<size_t> > mvSets;

};
} //namespace ORB_SLAM
#endif // TwoViewReconstruction_H

98
include/Viewer.h Normal file
View File

@@ -0,0 +1,98 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2020 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef VIEWER_H
#define VIEWER_H
#include "FrameDrawer.h"
#include "MapDrawer.h"
#include "Tracking.h"
#include "System.h"
#include <mutex>
namespace ORB_SLAM3
{
class Tracking;
class FrameDrawer;
class MapDrawer;
class System;
// Visualization thread: draws map points, keyframes, the current camera pose
// and the last processed frame, refreshed at the camera frame rate (Pangolin).
class Viewer
{
public:
    Viewer(System* pSystem, FrameDrawer* pFrameDrawer, MapDrawer* pMapDrawer, Tracking *pTracking, const string &strSettingPath);

    // Main thread function. Draw points, keyframes, the current camera pose and the last processed
    // frame. Drawing is refreshed according to the camera fps. We use Pangolin.
    void Run();

    // Asynchronous requests handled by the Run() loop.
    void RequestFinish();

    void RequestStop();

    bool isFinished();

    bool isStopped();

    bool isStepByStep();

    // Resumes drawing after a stop request.
    void Release();

    void SetTrackingPause();

    // When true, both camera images are shown (stereo setups).
    bool both;
private:

    bool ParseViewerParamFile(cv::FileStorage &fSettings);

    bool Stop();

    System* mpSystem;
    FrameDrawer* mpFrameDrawer;
    MapDrawer* mpMapDrawer;
    Tracking* mpTracker;

    // 1/fps in ms
    double mT;
    float mImageWidth, mImageHeight;

    // Initial viewpoint of the map viewer camera (from the settings file).
    float mViewpointX, mViewpointY, mViewpointZ, mViewpointF;

    bool CheckFinish();
    void SetFinish();
    bool mbFinishRequested;
    bool mbFinished;
    std::mutex mMutexFinish;

    bool mbStopped;
    bool mbStopRequested;
    std::mutex mMutexStop;

    bool mbStopTrack;
};
}
#endif // VIEWER_H

1258
include/getopt.c Normal file

File diff suppressed because it is too large Load Diff

188
include/getopt.h Normal file
View File

@@ -0,0 +1,188 @@
/* getopt.h */
/* Declarations for getopt.
Copyright (C) 1989-1994, 1996-1999, 2001 Free Software
Foundation, Inc. This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute
it and/or modify it under the terms of the GNU Lesser
General Public License as published by the Free Software
Foundation; either version 2.1 of the License, or
(at your option) any later version.
The GNU C Library is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General
Public License along with the GNU C Library; if not, write
to the Free Software Foundation, Inc., 59 Temple Place,
Suite 330, Boston, MA 02111-1307 USA. */
#ifndef _GETOPT_H
#ifndef __need_getopt
# define _GETOPT_H 1
#endif
/* Dual-inclusion scheme: when another header defines __need_getopt before
including this file, only the basic getopt declarations below are exposed
and _GETOPT_H is left undefined, so a later plain #include <getopt.h>
still picks up `struct option' and the getopt_long prototypes (the
trailing #undef __need_getopt makes that later include see everything). */
/* If __GNU_LIBRARY__ is not already defined, either we are being used
standalone, or this is the first header included in the source file.
If we are being used with glibc, we need to include <features.h>, but
that does not exist if we are standalone. So: if __GNU_LIBRARY__ is
not defined, include <ctype.h>, which will pull in <features.h> for us
if it's from glibc. (Why ctype.h? It's guaranteed to exist and it
doesn't flood the namespace with stuff the way some other headers do.) */
#if !defined __GNU_LIBRARY__
# include <ctype.h>
#endif
#ifdef __cplusplus
extern "C" {
#endif
/* For communication from `getopt' to the caller.
When `getopt' finds an option that takes an argument,
the argument value is returned here.
Also, when `ordering' is RETURN_IN_ORDER,
each non-option ARGV-element is returned here. */
extern char *optarg;
/* Index in ARGV of the next element to be scanned.
This is used for communication to and from the caller
and for communication between successive calls to `getopt'.
On entry to `getopt', zero means this is the first call; initialize.
When `getopt' returns -1, this is the index of the first of the
non-option elements that the caller should itself scan.
Otherwise, `optind' communicates from one call to the next
how much of ARGV has been scanned so far. */
extern int optind;
/* Callers store zero here to inhibit the error message `getopt' prints
for unrecognized options. */
extern int opterr;
/* Set to an option character which was unrecognized. */
extern int optopt;
#ifndef __need_getopt
/* Describe the long-named options requested by the application.
The LONG_OPTIONS argument to getopt_long or getopt_long_only is a vector
of `struct option' terminated by an element containing a name which is
zero.
The field `has_arg' is:
no_argument (or 0) if the option does not take an argument,
required_argument (or 1) if the option requires an argument,
optional_argument (or 2) if the option takes an optional argument.
If the field `flag' is not NULL, it points to a variable that is set
to the value given in the field `val' when the option is found, but
left unchanged if the option is not found.
To have a long-named option do something other than set an `int' to
a compiled-in constant, such as set a value from `optarg', set the
option's `flag' field to zero and its `val' field to a nonzero
value (the equivalent single-letter option character, if there is
one). For long options that have a zero `flag' field, `getopt'
returns the contents of the `val' field. */
struct option
{
/* `name' is const only under ANSI C / C++; K&R compilers get the
plain-char variant so initializers from string literals still work. */
# if (defined __STDC__ && __STDC__) || defined __cplusplus
const char *name;
# else
char *name;
# endif
/* has_arg can't be an enum because some compilers complain about
type mismatches in all the code that assumes it is an int. */
int has_arg;
int *flag;
int val;
};
/* Names for the values of the `has_arg' field of `struct option'. */
# define no_argument 0
# define required_argument 1
# define optional_argument 2
#endif /* need getopt */
/* Get definitions and prototypes for functions to process the
arguments in ARGV (ARGC of them, minus the program name) for
options given in OPTS.
Return the option character from OPTS just read. Return -1 when
there are no more options. For unrecognized options, or options
missing arguments, `optopt' is set to the option letter, and '?' is
returned.
The OPTS string is a list of characters which are recognized option
letters, optionally followed by colons, specifying that that letter
takes an argument, to be placed in `optarg'.
If a letter in OPTS is followed by two colons, its argument is
optional. This behavior is specific to the GNU `getopt'.
The argument `--' causes premature termination of argument
scanning, explicitly telling `getopt' that there are no more
options.
If OPTS begins with `--', then non-option arguments are treated as
arguments to the option '\0'. This behavior is specific to the GNU
`getopt'. */
#if (defined __STDC__ && __STDC__) || defined __cplusplus
# ifdef __GNU_LIBRARY__
/* Many other libraries have conflicting prototypes for getopt, with
differences in the consts, in stdlib.h. To avoid compilation
errors, only prototype getopt for the GNU C library. */
extern int getopt (int ___argc, char *const *___argv, const char *__shortopts);
# else /* not __GNU_LIBRARY__ */
extern int getopt ();
# endif /* __GNU_LIBRARY__ */
# ifndef __need_getopt
extern int getopt_long (int ___argc, char *const *___argv,
const char *__shortopts,
const struct option *__longopts, int *__longind);
extern int getopt_long_only (int ___argc, char *const *___argv,
const char *__shortopts,
const struct option *__longopts, int *__longind);
/* Internal only. Users should not call this directly. */
extern int _getopt_internal (int ___argc, char *const *___argv,
const char *__shortopts,
const struct option *__longopts, int *__longind,
int __long_only);
# endif
#else /* not __STDC__ */
/* Pre-ANSI (K&R) compilers: declare everything without prototypes. */
extern int getopt ();
# ifndef __need_getopt
extern int getopt_long ();
extern int getopt_long_only ();
extern int _getopt_internal ();
# endif
#endif /* __STDC__ */
#ifdef __cplusplus
}
#endif
/* Make sure we later can get all the definitions and declarations. */
#undef __need_getopt
#endif /* getopt.h */

59
include/unistd.h Normal file
View File

@@ -0,0 +1,59 @@
#ifndef _UNISTD_H
#define _UNISTD_H 1
/* This is intended as a drop-in replacement for unistd.h on Windows.
* Please add functionality as needed.
* https://stackoverflow.com/a/826027/1202830
*/
#include <stdlib.h>
#include <io.h>
#include <getopt.h> /* getopt at: https://gist.github.com/ashelly/7776712 */
#include <process.h> /* for getpid() and the exec..() family */
#include <direct.h> /* for _getcwd() and _chdir() */
#include <chrono>
#include <thread>
/* NOTE: <chrono> and <thread> are C++ headers, so this shim is usable from
C++ translation units only. */
/* Map BSD random()/srandom() onto the CRT's rand()/srand(). */
#define srandom srand
#define random rand
/* Values for the second argument to access.
These may be OR'd together. */
#define R_OK 4 /* Test for read permission. */
#define W_OK 2 /* Test for write permission. */
//#define X_OK 1 /* execute permission - unsupported in windows*/
#define F_OK 0 /* Test for existence. */
/* Forward the POSIX names to the Microsoft CRT's underscore-prefixed
equivalents (see <io.h>, <direct.h>). */
#define access _access
#define dup2 _dup2
#define execve _execve
#define ftruncate _chsize
#define unlink _unlink
#define fileno _fileno
#define getcwd _getcwd
#define chdir _chdir
#define isatty _isatty
#define lseek _lseek
/* read, write, and close are NOT being #defined here, because while there are file handle specific versions for Windows, they probably don't work for sockets. You need to look at your app and consider whether to call e.g. closesocket(). */
/* MSVC provides no ssize_t; define it as a macro sized to the pointer
width (64-bit on _WIN64, long otherwise). */
#ifdef _WIN64
#define ssize_t __int64
#else
#define ssize_t long
#endif
/* Standard file-descriptor numbers for stdin/stdout/stderr. */
#define STDIN_FILENO 0
#define STDOUT_FILENO 1
#define STDERR_FILENO 2
/* should be in some equivalent to <sys/types.h> */
//typedef __int8 int8_t;
//typedef __int16 int16_t;
//typedef __int32 int32_t;
//typedef __int64 int64_t;
//typedef unsigned __int8 uint8_t;
//typedef unsigned __int16 uint16_t;
//typedef unsigned __int32 uint32_t;
//typedef unsigned __int64 uint64_t;
#endif /* unistd.h */