Upload files to 'Examples/Stereo-Inertial'

This commit is contained in:
2022-07-19 08:38:42 +03:00
parent 0a37482915
commit 4491ab79ee
5 changed files with 637 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
# Usage: ./stereo_inertial_uzh-fpv path_to_vocabulary path_to_settings path_to_image_folder_1 path_to_times_file_left_1 path_to_times_file_right_1 path_to_imu_data_1 (... path_to_image_folder_N path_to_times_file_left_N path_to_times_file_right_N path_to_imu_data_N) (trajectory_file_name)
./stereo_inertial_uzh-fpv /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Vocabulary/ORBvoc.txt /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Examples/Stereo-Inertial/uzh-fpv.yaml ~/work_drivecast/datasets/uzh-fpv/indoor_forward_5_snapdragon_with_gt /home/pi/work_drivecast/datasets/uzh-fpv/indoor_forward_5_snapdragon_with_gt/left_images.txt /home/pi/work_drivecast/datasets/uzh-fpv/indoor_forward_5_snapdragon_with_gt/right_images.txt ~/work_drivecast/datasets/uzh-fpv/indoor_forward_5_snapdragon_with_gt/imu.txt ~/work_drivecast/datasets/uzh-fpv/indoor_forward_8_snapdragon /home/pi/work_drivecast/datasets/uzh-fpv/indoor_forward_8_snapdragon/left_images.txt /home/pi/work_drivecast/datasets/uzh-fpv/indoor_forward_8_snapdragon/right_images.txt ~/work_drivecast/datasets/uzh-fpv/indoor_forward_8_snapdragon/imu.txt ~/work_drivecast/datasets/uzh-fpv/indoor_forward_3_snapdragon_with_gt /home/pi/work_drivecast/datasets/uzh-fpv/indoor_forward_3_snapdragon_with_gt/left_images.txt /home/pi/work_drivecast/datasets/uzh-fpv/indoor_forward_3_snapdragon_with_gt/right_images.txt ~/work_drivecast/datasets/uzh-fpv/indoor_forward_3_snapdragon_with_gt/imu.txt ~/work_drivecast/datasets/uzh-fpv/indoor_forward_9_snapdragon_with_gt /home/pi/work_drivecast/datasets/uzh-fpv/indoor_forward_9_snapdragon_with_gt/left_images.txt /home/pi/work_drivecast/datasets/uzh-fpv/indoor_forward_9_snapdragon_with_gt/right_images.txt ~/work_drivecast/datasets/uzh-fpv/indoor_forward_9_snapdragon_with_gt/imu.txt ~/work_drivecast/datasets/uzh-fpv
#./stereo_inertial_uzh-fpv /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Vocabulary/ORBvoc.txt /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Examples/Stereo-Inertial/uzh-fpv.yaml ~/work_drivecast/datasets/uzh-fpv/outdoor_forward_1_snapdragon_with_gt ~/work_drivecast/datasets/uzh-fpv/outdoor_forward_1_snapdragon_with_gt/left_images.txt ~/work_drivecast/datasets/uzh-fpv/outdoor_forward_1_snapdragon_with_gt/right_images.txt ~/work_drivecast/datasets/uzh-fpv/outdoor_forward_1_snapdragon_with_gt/imu.txt

View File

@@ -0,0 +1,8 @@
# Usage: ./stereo_inertial_euroc path_to_vocabulary path_to_settings path_to_sequence_folder_1 path_to_times_file_1 (path_to_image_folder_2 path_to_times_file_2 ... path_to_image_folder_N path_to_times_file_N)
./stereo_inertial_euroc /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Vocabulary/ORBvoc.txt /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Examples/Stereo-Inertial/EuRoC.yaml /home/pi/work_drivecast/datasets/euroc-mav/MH_01_easy ~/work_drivecast/slams/ORB_SLAM3-1.0-release/Examples/Stereo-Inertial/EuRoC_TimeStamps/MH01.txt
#./stereo_inertial_euroc /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Vocabulary/ORBvoc.txt /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Examples/Stereo-Inertial/EuRoC.yaml /home/pi/work_drivecast/datasets/euroc-mav/MH_05_difficult ~/work_drivecast/slams/ORB_SLAM3-1.0-release/Examples/Stereo-Inertial/EuRoC_TimeStamps/MH05.txt
#./stereo_inertial_euroc /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Vocabulary/ORBvoc.txt /home/pi/work_drivecast/slams/ORB_SLAM3-1.0-release/Examples/Stereo-Inertial/EuRoC.yaml /home/pi/work_drivecast/datasets/euroc-mav/V2_03_difficult ~/work_drivecast/slams/ORB_SLAM3-1.0-release/Examples/Stereo-Inertial/EuRoC_TimeStamps/V203.txt

View File

@@ -0,0 +1,394 @@
/**
* This file is part of ORB-SLAM3
*
* Copyright (C) 2017-2021 Carlos Campos, Richard Elvira, Juan J. Gómez Rodríguez, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
* Copyright (C) 2014-2016 Raúl Mur-Artal, José M.M. Montiel and Juan D. Tardós, University of Zaragoza.
*
* ORB-SLAM3 is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ORB-SLAM3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with ORB-SLAM3.
* If not, see <http://www.gnu.org/licenses/>.
*/
#include<iostream>
#include<algorithm>
#include<fstream>
#include<chrono>
#include <ctime>
#include <sstream>
#include<opencv2/core/core.hpp>
#include<System.h>
#include "ImuTypes.h"
using namespace std;
// Parses the left/right timestamp files and fills the per-image path lists and
// the camera timestamps (definitions at the bottom of this file).
void LoadImagesTUMVI(const string &strPath, const string &strPathTimesLeft, const string &strPathTimesRight,
vector<string> &vstrImageLeft, vector<string> &vstrImageRight, vector<double> &vTimeStamps);
// Parses the IMU file into timestamps, accelerometer and gyroscope samples.
void LoadIMU(const string &strImuPath, vector<double> &vTimeStamps, vector<cv::Point3f> &vAcc, vector<cv::Point3f> &vGyro);
// Accumulated tracking time over all processed frames (seconds).
double ttrack_tot = 0;
// Entry point for the stereo-inertial UZH-FPV example.
// Command-line layout (after the program name):
//   argv[1]  path to the ORB vocabulary
//   argv[2]  path to the settings .yaml
//   then one quadruple per sequence:
//     image_folder  left_times_file  right_times_file  imu_file
//   optionally one trailing argument: trajectory file name suffix.
int main(int argc, char **argv)
{
// Number of (folder, left-times, right-times, imu) quadruples on the command line.
const int num_seq = (argc-3)/4;
cout << "num_seq = " << num_seq << endl;
// Exactly one leftover argument after the quadruples means a trajectory file name was given.
bool bFileName= (((argc-3) % 4) == 1);
string file_name;
if (bFileName)
file_name = string(argv[argc-1]);
// Minimum: vocabulary, settings, and one full sequence quadruple.
if(argc < 7)
{
cerr << endl << "Usage: ./stereo_inertial_tum_vi path_to_vocabulary path_to_settings path_to_image_folder_1 path_to_times_file1 path_to_times_file2 path_to_imu_data (trajectory_file_name)" << endl;
return 1;
}
// Load all sequences:
int seq;
// Per-sequence data: image paths, camera timestamps, IMU samples and timestamps.
vector< vector<string> > vstrImageLeftFilenames;
vector< vector<string> > vstrImageRightFilenames;
vector< vector<double> > vTimestampsCam;
vector< vector<cv::Point3f> > vAcc, vGyro;
vector< vector<double> > vTimestampsImu;
vector<int> nImages;
vector<int> nImu;
// Index of the first IMU sample to feed for each sequence (see search below).
vector<int> first_imu(num_seq,0);
vstrImageLeftFilenames.resize(num_seq);
vstrImageRightFilenames.resize(num_seq);
vTimestampsCam.resize(num_seq);
vAcc.resize(num_seq);
vGyro.resize(num_seq);
vTimestampsImu.resize(num_seq);
nImages.resize(num_seq);
nImu.resize(num_seq);
int tot_images = 0;
for (seq = 0; seq<num_seq; seq++)
{
cout << "Loading images for sequence " << seq << "...";
// Quadruple for sequence `seq` starts at argv[(4*seq)+3].
LoadImagesTUMVI(string(argv[(4*seq)+3]), string(argv[(4*seq)+4]), string(argv[(4*seq)+5]), vstrImageLeftFilenames[seq], vstrImageRightFilenames[seq], vTimestampsCam[seq]);
cout << "Total images: " << vstrImageLeftFilenames[seq].size() << endl;
cout << "Total cam ts: " << vTimestampsCam[seq].size() << endl;
cout << "first cam ts: " << vTimestampsCam[seq][0] << endl;
cout << "LOADED!" << endl;
cout << "Loading IMU for sequence " << seq << "...";
LoadIMU(string(argv[(4*seq)+6]), vTimestampsImu[seq], vAcc[seq], vGyro[seq]);
cout << "Total IMU meas: " << vTimestampsImu[seq].size() << endl;
cout << "first IMU ts: " << vTimestampsImu[seq][0] << endl;
cout << "LOADED!" << endl;
nImages[seq] = vstrImageLeftFilenames[seq].size();
// The bare numbered couts ("1".."7") below are debug breadcrumbs to localize crashes.
std::cout << "1" << std::endl;
tot_images += nImages[seq];
std::cout << "2" << std::endl;
nImu[seq] = vTimestampsImu[seq].size();
std::cout << "3" << std::endl;
// Let's see the Timestamps extracted
std::cout << "The IMU Timestamps: " << std::endl;
for (int i = 0; i < 10; i++){
std::cout << vTimestampsImu[seq][i] << " ";
}
std::cout << "The Camera Timestamps: " << std::endl;
for (int i = 0; i < 10; i++){
std::cout << vTimestampsCam[seq][i] << " ";
}
if((nImages[seq]<=0)||(nImu[seq]<=0))
{
cerr << "ERROR: Failed to load images or IMU for sequence" << seq << endl;
return 1;
}
std::cout << "4" << std::endl;
// Find first imu to be considered, supposing imu measurements start first
// NOTE(review): if the first IMU stamp is already greater than the first camera
// stamp, this loop body never runs and the decrement leaves first_imu[seq] == -1,
// which would index out of bounds below — confirm the datasets guarantee IMU starts first.
while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][0])
first_imu[seq]++;
first_imu[seq]--; // first imu measurement to be considered
std::cout << "5" << std::endl;
}
// Vector for tracking time statistics
vector<float> vTimesTrack;
vTimesTrack.resize(tot_images);
std::cout << "6" << std::endl;
cout << endl << "-------" << endl;
cout.precision(17);
/*cout << "Start processing sequence ..." << endl;
cout << "Images in the sequence: " << nImages << endl;
cout << "IMU data in the sequence: " << nImu << endl << endl;*/
// Create SLAM system. It initializes all system threads and gets ready to process frames.
ORB_SLAM3::System SLAM(argv[1],argv[2],ORB_SLAM3::System::IMU_STEREO, true, 0, file_name);
std::cout << "7" << std::endl;
// Scale factor applied to input images before tracking (from the settings file).
float imageScale = SLAM.GetImageScale();
double t_resize = 0.f;
double t_track = 0.f;
int proccIm = 0;
for (seq = 0; seq<num_seq; seq++)
{
// Main loop
cv::Mat imLeft, imRight;
vector<ORB_SLAM3::IMU::Point> vImuMeas;
proccIm = 0;
// Contrast-limited adaptive histogram equalization applied to both images each frame.
cv::Ptr<cv::CLAHE> clahe = cv::createCLAHE(3.0, cv::Size(8, 8));
for(int ni=0; ni<nImages[seq]; ni++, proccIm++)
{
// Read image from file
// NOTE(review): end() - 1 drops the final character of each stored path —
// presumably compensating for a trailing stray character left behind by
// LoadImagesTUMVI (its std::remove result is ignored there). Verify against the input files.
std::string imgPathLeft(vstrImageLeftFilenames[seq][ni].begin(), vstrImageLeftFilenames[seq][ni].end() - 1);
std::string imgPathRight(vstrImageRightFilenames[seq][ni].begin(), vstrImageRightFilenames[seq][ni].end() - 1);
imLeft = cv::imread(imgPathLeft,cv::IMREAD_GRAYSCALE);
imRight = cv::imread(imgPathRight,cv::IMREAD_GRAYSCALE);
if(imageScale != 1.f)
{
#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t_Start_Resize = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t_Start_Resize = std::chrono::monotonic_clock::now();
#endif
#endif
int width = imLeft.cols * imageScale;
int height = imLeft.rows * imageScale;
cv::resize(imLeft, imLeft, cv::Size(width, height));
cv::resize(imRight, imRight, cv::Size(width, height));
#ifdef REGISTER_TIMES
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t_End_Resize = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t_End_Resize = std::chrono::monotonic_clock::now();
#endif
t_resize = std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t_End_Resize - t_Start_Resize).count();
SLAM.InsertResizeTime(t_resize);
#endif
}
// clahe
clahe->apply(imLeft,imLeft);
clahe->apply(imRight,imRight);
double tframe = vTimestampsCam[seq][ni];
if(imLeft.empty() || imRight.empty())
{
cerr << endl << "Failed to load image at: "
<< vstrImageLeftFilenames[seq][ni] << endl;
return 1;
}
// Load imu measurements from previous frame
// Collect every IMU sample with timestamp <= current frame timestamp.
vImuMeas.clear();
if(ni>0)
{
// cout << "t_cam " << tframe << endl;
while(vTimestampsImu[seq][first_imu[seq]]<=vTimestampsCam[seq][ni])
{
// vImuMeas.push_back(ORB_SLAM3::IMU::Point(vAcc[first_imu],vGyro[first_imu],vTimestampsImu[first_imu]));
auto currImuMeas = ORB_SLAM3::IMU::Point(vAcc[seq][first_imu[seq]].x,vAcc[seq][first_imu[seq]].y,vAcc[seq][first_imu[seq]].z,
vGyro[seq][first_imu[seq]].x,vGyro[seq][first_imu[seq]].y,vGyro[seq][first_imu[seq]].z,
vTimestampsImu[seq][first_imu[seq]]);
// std::cout << "Current IMU measurement accelerometer: " << currImuMeas.a << std::endl;
// std::cout << "Current IMU measurement gyro: " << currImuMeas.w << std::endl;
vImuMeas.push_back(currImuMeas);
// cout << "t_imu = " << fixed << vImuMeas.back().t << endl;
first_imu[seq]++;
}
}
/*cout << "first imu: " << first_imu[seq] << endl;
cout << "first imu time: " << fixed << vTimestampsImu[seq][0] << endl;
cout << "size vImu: " << vImuMeas.size() << endl;*/
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t1 = std::chrono::monotonic_clock::now();
#endif
// Pass the image to the SLAM system
SLAM.TrackStereo(imLeft,imRight,tframe,vImuMeas);
#ifdef COMPILEDWITHC11
std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
#else
std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
#endif
#ifdef REGISTER_TIMES
t_track = t_resize + std::chrono::duration_cast<std::chrono::duration<double,std::milli> >(t2 - t1).count();
SLAM.InsertTrackTime(t_track);
#endif
double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();
ttrack_tot += ttrack;
// std::cout << "ttrack: " << ttrack << std::endl;
// NOTE(review): ni restarts at 0 for each sequence, so later sequences overwrite
// the timings of earlier ones; the statistics below only look at nImages[0] entries.
vTimesTrack[ni]=ttrack;
// Wait to load the next frame
// Sleep so playback follows the inter-frame interval of the recorded timestamps.
double T=0;
if(ni<nImages[seq]-1)
T = vTimestampsCam[seq][ni+1]-tframe;
else if(ni>0)
T = tframe-vTimestampsCam[seq][ni-1];
if(ttrack<T)
usleep((T-ttrack)*1e6); // 1e6
}
if(seq < num_seq - 1)
{
cout << "Changing the dataset" << endl;
SLAM.ChangeDataset();
}
}
// Stop all threads
SLAM.Shutdown();
// Tracking time statistics
// Save camera trajectory
std::chrono::system_clock::time_point scNow = std::chrono::system_clock::now();
std::time_t now = std::chrono::system_clock::to_time_t(scNow);
std::stringstream ss;
ss << now;
if (bFileName)
{
// Prefix the user-supplied trajectory name for frame and keyframe outputs.
const string kf_file = "kf_" + string(argv[argc-1]) + ".txt";
const string f_file = "f_" + string(argv[argc-1]) + ".txt";
SLAM.SaveTrajectoryEuRoC(f_file);
SLAM.SaveKeyFrameTrajectoryEuRoC(kf_file);
}
else
{
SLAM.SaveTrajectoryEuRoC("CameraTrajectory.txt");
SLAM.SaveKeyFrameTrajectoryEuRoC("KeyFrameTrajectory.txt");
}
// Median/mean over the first sequence only (see NOTE at vTimesTrack above);
// the mean divides by proccIm, the frame count of the *last* sequence.
sort(vTimesTrack.begin(),vTimesTrack.end());
float totaltime = 0;
for(int ni=0; ni<nImages[0]; ni++)
{
totaltime+=vTimesTrack[ni];
}
cout << "-------" << endl << endl;
cout << "median tracking time: " << vTimesTrack[nImages[0]/2] << endl;
cout << "mean tracking time: " << totaltime/proccIm << endl;
return 0;
}
// Loads left/right image paths and camera timestamps for one sequence.
// strPath           image folder; prepended to every filename read from the times files.
// strPathTimesLeft  left-camera times file; lines appear to be space-separated
//                   "<field> <timestamp> <filename>" — TODO confirm against the dataset files.
// strPathTimesRight right-camera times file, assumed line-aligned with the left one
//                   (both streams are advanced in lockstep).
// Outputs: vstrImageLeft/vstrImageRight (full paths) and vTimeStamps (from the left file).
void LoadImagesTUMVI(const string &strPath, const string &strPathTimesLeft, const string &strPathTimesRight,
vector<string> &vstrImageLeft, vector<string> &vstrImageRight, vector<double> &vTimeStamps)
{
ifstream fTimesLeft, fTimesRight;
cout << strPath << endl;
cout << strPathTimesRight << endl;
cout << strPathTimesLeft << endl;
fTimesLeft.open(strPathTimesLeft.c_str());
fTimesRight.open(strPathTimesRight.c_str());
vTimeStamps.reserve(5000);
vstrImageLeft.reserve(5000);
vstrImageRight.reserve(5000);
while(!fTimesLeft.eof())
{
string sLeft, sRight;
getline(fTimesLeft,sLeft);
getline(fTimesRight, sRight);
if(!sLeft.empty())
{
// Skip comment lines. NOTE(review): only the left line is checked; the right line
// consumed above is dropped too, which keeps the streams aligned only if both
// files carry comments on the same line numbers — confirm.
if (sLeft[0] == '#')
continue;
// NOTE(review): find() returns size_t (npos when absent); storing it in an int
// yields -1 on a malformed line, after which substr(pos+1) is substr(0) — the
// whole line. Assumes well-formed input.
int pos = sLeft.find(' ');
// Drop the first field from both lines; the same offset is reused for the right
// line, assuming identical field widths in both files — TODO confirm.
sLeft = sLeft.substr(pos+1, string::npos);
sRight = sRight.substr(pos+1, string::npos);
pos = sLeft.find(' ');
// Second field is the timestamp, the remainder is the image filename.
string item = sLeft.substr(0, pos);
string strImgLeft = sLeft.substr(pos+1, string::npos);
// std::cout << "The resulting left filename string is: " << strImgLeft << std::endl;
string strImgRight = sRight.substr(pos+1, string::npos);
// NOTE(review): std::remove only shifts non-space characters forward and its return
// value is ignored, so the strings keep their original length with residue at the
// end (the erase-remove idiom would need .erase(...)). main() appears to compensate
// by dropping the last character of each stored path — fix both together, not here.
std::remove(strImgLeft.begin(), strImgLeft.end(), ' ');
std::remove(strImgRight.begin(), strImgRight.end(), ' ');
vstrImageLeft.push_back(strPath + "/" + strImgLeft);
vstrImageRight.push_back(strPath + "/" + strImgRight);
// stod throws std::invalid_argument on a non-numeric timestamp field.
double t = stod(item);
vTimeStamps.push_back(t);
}
}
}
// Loads IMU measurements from a space-separated text file.
// Each non-comment line is expected to carry 8 numeric fields; by the indices used
// below the layout is: <field0> <timestamp> <gx> <gy> <gz> <ax> <ay> <az>
// (field0 presumably a sample id — TODO confirm against the dataset format).
// strImuPath  path to the IMU file; '#' lines are skipped.
// Outputs: vTimeStamps (field 1), vAcc (fields 5..7), vGyro (fields 2..4).
// Note: stod throws std::invalid_argument on non-numeric fields.
void LoadIMU(const string &strImuPath, vector<double> &vTimeStamps, vector<cv::Point3f> &vAcc, vector<cv::Point3f> &vGyro)
{
    ifstream fImu;
    fImu.open(strImuPath.c_str());
    vTimeStamps.reserve(5000);
    vAcc.reserve(5000);
    vGyro.reserve(5000);
    string s;
    // getline-driven loop instead of while(!eof()): stops exactly when reading fails,
    // so the trailing empty read of the eof pattern never reaches the parser.
    while(getline(fImu, s))
    {
        // Check empty() before s[0] so blank lines are skipped explicitly.
        if(s.empty() || s[0] == '#')
            continue;
        double data[8];
        int count = 0;
        size_t pos = 0;
        // Bounded at 8 fields so a line with extra separators cannot write past data[].
        while (count < 8 && (pos = s.find(' ')) != string::npos) {
            data[count++] = stod(s.substr(0, pos));
            s.erase(0, pos + 1);
        }
        // Last field has no trailing separator; store it at the next free slot
        // (the original hard-coded data[7], which is equivalent for well-formed 8-field lines).
        if (count < 8 && !s.empty())
            data[count++] = stod(s);
        if (count < 8)
            continue; // malformed/short line: skip rather than read uninitialized fields
        vTimeStamps.push_back(data[1]);
        vAcc.push_back(cv::Point3f(data[5],data[6],data[7]));
        vGyro.push_back(cv::Point3f(data[2],data[3],data[4]));
    }
}

View File

@@ -0,0 +1,115 @@
%YAML:1.0
#--------------------------------------------------------------------------------------------
# Camera Parameters. Adjust them!
#--------------------------------------------------------------------------------------------
File.version: "1.0"
Camera.type: "PinHole"
Camera1.fx: 278.66723066149086
Camera1.fy: 278.48991409740296
Camera1.cx: 319.75221200593535
Camera1.cy: 241.96858910358173
Camera1.k1: -0.013721808247486035
Camera1.k2: 0.020727425669427896
Camera1.p1: -0.012786476702685545
Camera1.p2: 0.0025242267320687625
Camera2.fx: 277.61640629770613
Camera2.fy: 277.63749695723294
Camera2.cx: 314.8944703346039
Camera2.cy: 236.04310050462587
Camera2.k1: -0.008456929295619607
Camera2.k2: 0.011407590938612062
Camera2.p1: -0.006951788325762078
Camera2.p2: 0.0015368127092821786
Camera.width: 640
Camera.height: 480
# Camera frames per second
Camera.fps: 30
# Color order of the images (0: BGR, 1: RGB. It is ignored if images are grayscale)
Camera.RGB: 1
# Close/Far threshold. Baseline times.
Stereo.ThDepth: 60.0
Stereo.T_c1_c2: !!opencv-matrix
rows: 4
cols: 4
dt: f
data: [ 9.99805302e-01, -1.11477581e-02, -1.62814662e-02, 7.96159451e-02,
1.11977385e-02, 9.99932857e-01, 2.98183487e-03, 1.45903657e-04,
1.62471322e-02, -3.16356991e-03, 9.99863002e-01, 8.53393252e-04,
0, 0, 0, 1.000000000000000]
#IMU.T_b_c1: !!opencv-matrix
# rows: 4
# cols: 4
# dt: f
# data: [-0.028228787368606456, -0.999601488301944, 1.2175294828553618e-05, 0.02172388268966517,
# 0.014401251861751119, -0.00041887083271471837, -0.9998962088597202, -6.605455433829172e-05,
# 0.999497743623523, -0.028225682131089447, 0.014407337010089172, -0.00048817563004522853,
# 0.0, 0.0, 0.0, 1.0]
# Transformation from camera 0 to body-frame (imu)
IMU.T_b_c1: !!opencv-matrix
rows: 4
cols: 4
dt: f
data: [-2.82287874e-02, 1.44012519e-02, 9.99497744e-01, 1.10212057e-03,
-9.99601488e-01, -4.18870833e-04, -2.82256821e-02, 2.17014187e-02,
1.21752948e-05, -9.99896209e-01, 1.44073370e-02, -5.92788823e-05,
0.0, 0.0, 0.0, 1.0]
# IMU noise
IMU.NoiseGyro: 0.05
IMU.NoiseAcc: 0.1
IMU.GyroWalk: 4.0e-05
IMU.AccWalk: 0.002
IMU.Frequency: 500.0
#--------------------------------------------------------------------------------------------
# ORB Parameters
#--------------------------------------------------------------------------------------------
# ORB Extractor: Number of features per image
ORBextractor.nFeatures: 1250 # Tested with 1250
# ORB Extractor: Scale factor between levels in the scale pyramid
ORBextractor.scaleFactor: 1.2
# ORB Extractor: Number of levels in the scale pyramid
ORBextractor.nLevels: 8
# ORB Extractor: Fast threshold
# Image is divided in a grid. At each cell FAST are extracted imposing a minimum response.
# Firstly we impose iniThFAST. If no corners are detected we impose a lower value minThFAST
# You can lower these values if your images have low contrast
ORBextractor.iniThFAST: 20
ORBextractor.minThFAST: 7
#--------------------------------------------------------------------------------------------
# Viewer Parameters
#--------------------------------------------------------------------------------------------
Viewer.KeyFrameSize: 0.05
Viewer.KeyFrameLineWidth: 1.0
Viewer.GraphLineWidth: 0.9
Viewer.PointSize: 2.0
Viewer.CameraSize: 0.08
Viewer.CameraLineWidth: 3.0
Viewer.ViewpointX: 0.0
Viewer.ViewpointY: -0.7
Viewer.ViewpointZ: -3.5
Viewer.ViewpointF: 500.0
#--------------------------------------------------------------------------------------------
# Atlas Parameters
#--------------------------------------------------------------------------------------------
#System.LoadAtlasFromFile: "ComplexUrban32"
#System.SaveAtlasToFile: ""

View File

@@ -0,0 +1,115 @@
%YAML:1.0
#--------------------------------------------------------------------------------------------
# Camera Parameters. Adjust them!
#--------------------------------------------------------------------------------------------
File.version: "1.0"
Camera.type: "PinHole"
Camera1.fx: 278.66723066149086
Camera1.fy: 278.48991409740296
Camera1.cx: 319.75221200593535
Camera1.cy: 241.96858910358173
Camera1.k1: -0.013721808247486035
Camera1.k2: 0.020727425669427896
Camera1.p1: -0.012786476702685545
Camera1.p2: 0.0025242267320687625
Camera2.fx: 277.61640629770613
Camera2.fy: 277.63749695723294
Camera2.cx: 314.8944703346039
Camera2.cy: 236.04310050462587
Camera2.k1: -0.008456929295619607
Camera2.k2: 0.011407590938612062
Camera2.p1: -0.006951788325762078
Camera2.p2: 0.0015368127092821786
Camera.width: 640
Camera.height: 480
# Camera frames per second
Camera.fps: 30
# Color order of the images (0: BGR, 1: RGB. It is ignored if images are grayscale)
Camera.RGB: 1
# Close/Far threshold. Baseline times.
Stereo.ThDepth: 60.0
Stereo.T_c1_c2: !!opencv-matrix
rows: 4
cols: 4
dt: f
data: [ 9.99805302e-01, -1.11477581e-02, -1.62814662e-02, 7.96159451e-02,
1.11977385e-02, 9.99932857e-01, 2.98183487e-03, 1.45903657e-04,
1.62471322e-02, -3.16356991e-03, 9.99863002e-01, 8.53393252e-04,
0, 0, 0, 1.000000000000000]
#IMU.T_b_c1: !!opencv-matrix
# rows: 4
# cols: 4
# dt: f
# data: [-0.028228787368606456, -0.999601488301944, 1.2175294828553618e-05, 0.02172388268966517,
# 0.014401251861751119, -0.00041887083271471837, -0.9998962088597202, -6.605455433829172e-05,
# 0.999497743623523, -0.028225682131089447, 0.014407337010089172, -0.00048817563004522853,
# 0.0, 0.0, 0.0, 1.0]
# Transformation from camera 0 to body-frame (imu)
IMU.T_b_c1: !!opencv-matrix
rows: 4
cols: 4
dt: f
data: [-2.82287874e-02, 1.44012519e-02, 9.99497744e-01, 1.10212057e-03,
-9.99601488e-01, -4.18870833e-04, -2.82256821e-02, 2.17014187e-02,
1.21752948e-05, -9.99896209e-01, 1.44073370e-02, -5.92788823e-05,
0.0, 0.0, 0.0, 1.0]
# IMU noise
IMU.NoiseGyro: 0.05
IMU.NoiseAcc: 0.1
IMU.GyroWalk: 4.0e-05
IMU.AccWalk: 0.002
IMU.Frequency: 500.0
#--------------------------------------------------------------------------------------------
# ORB Parameters
#--------------------------------------------------------------------------------------------
# ORB Extractor: Number of features per image
ORBextractor.nFeatures: 1250 # Tested with 1250
# ORB Extractor: Scale factor between levels in the scale pyramid
ORBextractor.scaleFactor: 1.2
# ORB Extractor: Number of levels in the scale pyramid
ORBextractor.nLevels: 8
# ORB Extractor: Fast threshold
# Image is divided in a grid. At each cell FAST are extracted imposing a minimum response.
# Firstly we impose iniThFAST. If no corners are detected we impose a lower value minThFAST
# You can lower these values if your images have low contrast
ORBextractor.iniThFAST: 20
ORBextractor.minThFAST: 7
#--------------------------------------------------------------------------------------------
# Viewer Parameters
#--------------------------------------------------------------------------------------------
Viewer.KeyFrameSize: 0.05
Viewer.KeyFrameLineWidth: 1.0
Viewer.GraphLineWidth: 0.9
Viewer.PointSize: 2.0
Viewer.CameraSize: 0.08
Viewer.CameraLineWidth: 3.0
Viewer.ViewpointX: 0.0
Viewer.ViewpointY: -0.7
Viewer.ViewpointZ: -3.5
Viewer.ViewpointF: 500.0
#--------------------------------------------------------------------------------------------
# Atlas Parameters
#--------------------------------------------------------------------------------------------
#System.LoadAtlasFromFile: "ComplexUrban32"
#System.SaveAtlasToFile: ""