GPS fusion implemented

This commit is contained in:
admin1
2022-08-28 22:25:47 +03:00
parent b8ee6672d1
commit 979c7a2250
27 changed files with 132151 additions and 110 deletions

View File

@@ -0,0 +1,94 @@
<launch>
<!-- OpenVINS MSCKF launch with GPS-fusion helpers: starts the estimator plus the
     image_render_combine and render_cubes visualization nodes, with optional bag
     playback, trajectory recording, and live groundtruth alignment. -->
<!-- what config we are going to run (should match folder name) -->
<arg name="verbosity" default="INFO" /> <!-- ALL, DEBUG, INFO, WARNING, ERROR, SILENT -->
<arg name="config" default="uzh-fpv" /> <!-- euroc_mav, tum_vi, rpng_aruco -->
<arg name="config_path" default="$(find ov_msckf)/../config/$(arg config)/estimator_config.yaml" />
<!-- mono or stereo and what ros bag to play -->
<arg name="max_cameras" default="2" />
<arg name="use_stereo" default="true" />
<arg name="bag_start" default="0" /> <!-- v1-2: 0, mh1: 40, mh2: 35, mh3: 17.5, mh4-5: 15 -->
<arg name="dataset" default="indoor_forward_3_snapdragon_with_gt" /> <!-- V1_01_easy, V1_02_medium, V2_02_medium -->
<arg name="dobag" default="false" /> <!-- if we should play back the bag -->
<!-- <arg name="bag" default="/media/patrick/RPNG\ FLASH\ 3/$(arg config)/$(arg dataset).bag" /> -->
<arg name="bag" default="/home/pi/work_drivecast/datasets/$(arg config)/$(arg dataset).bag" />
<!-- <arg name="bag" default="/datasets/$(arg config)/$(arg dataset).bag" />-->
<!-- saving trajectory path and timing information -->
<arg name="dosave" default="true" />
<arg name="dotime" default="true" />
<arg name="path_est" default="/tmp/traj_estimate.txt" />
<arg name="path_time" default="/tmp/traj_timing.txt" />
<!-- if we should viz the groundtruth -->
<arg name="dolivetraj" default="false" />
<arg name="path_gt" default="$(find ov_data)/$(arg config)/$(arg dataset).txt" />
<!-- MASTER NODE! -->
<!-- <node name="run_subscribe_msckf" pkg="ov_msckf" type="run_subscribe_msckf" output="screen" clear_params="true" required="true" launch-prefix="gdb -ex run &#45;&#45;args">-->
<!-- helper nodes that combine the rendered overlay with the camera image and
     render the cube markers for the GPS-aligned trajectory -->
<node name="image_render_combine" pkg="ov_msckf" type="image_render_combine" output="screen" clear_params="true" required="true"> </node>
<node name="render_cubes" pkg="ov_msckf" type="render_cubes" output="screen" clear_params="true" required="true"></node>
<node name="run_subscribe_msckf" pkg="ov_msckf" type="run_subscribe_msckf" output="screen" clear_params="true" required="true">
<!-- master configuration object -->
<param name="verbosity" type="string" value="$(arg verbosity)" />
<param name="config_path" type="string" value="$(arg config_path)" />
<!-- world/filter parameters -->
<param name="use_stereo" type="bool" value="$(arg use_stereo)" />
<param name="max_cameras" type="int" value="$(arg max_cameras)" />
<!-- timing statistics recording -->
<param name="record_timing_information" type="bool" value="$(arg dotime)" />
<param name="record_timing_filepath" type="string" value="$(arg path_time)" />
<!-- NOTE(review): sensor topics are remapped to KITTI bag topics although the
     default config is uzh-fpv — confirm which dataset this launch targets. -->
<remap from="/imu/data_raw" to="/kitti/oxts/imu"/>
<remap from="/stereo/left/image_raw" to="/kitti/camera_color_left/image_raw"/>
<remap from="/stereo/right/image_raw" to="/kitti/camera_color_right/image_raw"/>
</node>
<!-- play the dataset -->
<group if="$(arg dobag)">
<node pkg="rosbag" type="play" name="rosbag" args="-d 1 -s $(arg bag_start) $(arg bag)" required="true"/>
</group>
<!-- record the trajectory if enabled -->
<group if="$(arg dosave)">
<node name="recorder_estimate" pkg="ov_eval" type="pose_to_file" output="screen" required="true">
<param name="topic" type="str" value="/ov_msckf/poseimu" />
<param name="topic_type" type="str" value="PoseWithCovarianceStamped" />
<param name="output" type="str" value="$(arg path_est)" />
</node>
</group>
<!-- path viz of aligned gt -->
<group if="$(arg dolivetraj)">
<node name="live_align_trajectory" pkg="ov_eval" type="live_align_trajectory" output="log" clear_params="true">
<param name="alignment_type" type="str" value="posyaw" />
<param name="path_gt" type="str" value="$(arg path_gt)" />
</node>
</group>
<!-- Publish a fixed CameraInfo for camera1 at 2 Hz (rostopic pub -r 2).
     NOTE(review): distortion_model 'plumb_bob' conventionally carries five
     coefficients (k1, k2, t1, t2, k3) but D lists only three — verify that
     every consumer of this topic accepts a shortened D vector. -->
<group ns="camera1">
<!-- <node pkg="tf" type="static_transform_publisher" name="camera_broadcaster"
args="0.1 0.1 -0.5 0 0 0 1 map camera1 10" /> -->
<node name="camera_info" pkg="rostopic" type="rostopic"
args="pub camera_info sensor_msgs/CameraInfo
'{header: {seq: 0, stamp: {secs: 0, nsecs: 0}, frame_id: 'cam0'},
height: 480, width: 640, distortion_model: 'plumb_bob',
D: [-5.6143027800000002e-02, 1.3952563200000001e-01, -1.2155906999999999e-03],
K: [500.0, 0.0, 320, 0.0, 500.0, 240.0, 0.0, 0.0, 1.0],
R: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0],
P: [500.0, 0.0, 320, 0.0, 0.0, 500, 240, 0.0, 0.0, 0.0, 1.0, 0.0],
binning_x: 0, binning_y: 0,
roi: {x_offset: 0, y_offset: 0, height: 480, width: 640, do_rectify: false}}' -r 2"
output="screen"/>
</group>
</launch>

View File

@@ -16,7 +16,11 @@
std::vector<sensor_msgs::Image> sensor_images;
ros::Publisher pub_res, pub_mask, pub_recv_curr_image;
// sMsg - sensor message. rMsg - render message
void rendered_image_callback(sensor_msgs::Image::ConstPtr sMsg, sensor_msgs::Image::ConstPtr rMsg){
// publish the left image to the topic needed for
// pub_recv_curr_image.publish(sMsg);
cv::Mat image, mask;
cv_bridge::CvImagePtr bridge_img = cv_bridge::toCvCopy(rMsg, "bgr8");
image = bridge_img->image;
@@ -128,7 +132,7 @@ int main(int argc, char* argv[]){
sync->registerCallback(boost::bind(&rendered_image_callback, _1, _2));
pub_res = nh.advertise<sensor_msgs::Image>("/ov_msckf/combined", 2);
pub_mask = nh.advertise<sensor_msgs::Image>("/ov_msckf/mask", 2);
pub_recv_curr_image = nh.advertise<sensor_msgs::Image>("/ov_msckf/recv_curr_image", 2);
// pub_recv_curr_image = nh.advertise<sensor_msgs::Image>("/global_fusion_node/sensor_left", 2);
ros::spin();

View File

@@ -1,19 +1,36 @@
//
// Created by Podmogilnyi Ivan on 17.08.2022.
//
// C++ std library
#include <fstream>
#include <filesystem>
// ROS library
#include <ros/ros.h>
#include <sensor_msgs/Imu.h>
#include <sensor_msgs/Image.h>
#include <visualization_msgs/Marker.h>
#include <sensor_msgs/CameraInfo.h>
#include <geometry_msgs/TransformStamped.h>
#include <nav_msgs/Path.h>
float yCubesVisPos = -2.0;
// open_vis library
#include <utils/quat_ops.h>
// thirdparty libraries
#include <Eigen/Core>
float yCubesVisPos = -0.5;
float skew = 5.0;
std::vector<std::vector<double>> cubeVisPoints = {
{71.215, 32.694, yCubesVisPos+2.5},
{71.215, 32.694, yCubesVisPos+2.0},
// {81.668, 26.227, yCubesVisPos},
{104.061, 36.691, yCubesVisPos+2.0},
{104.061, 36.691, yCubesVisPos+1.5},
// {90.726, 32.825, yCubesVisPos},
{109.541, 60.415, yCubesVisPos+1.5},
{109.541, 60.415, yCubesVisPos+1.0},
// {149.264, 69.490, yCubesVisPos},
{183.605, 91.474, yCubesVisPos+0.5},
// {198.294, 97.113, yCubesVisPos},
@@ -32,12 +49,248 @@ std::vector<std::vector<double>> cubeVisPoints = {
// {1205.432, 681.457, yCubesVisPos},
{1184.375, 735.604, yCubesVisPos},
// {1232.697, 715.647, yCubesVisPos},
{1244.767, 784.257, yCubesVisPos}
};
{1244.767, 784.257, yCubesVisPos},
ros::Publisher pub_cubes;
ros::Subscriber sub_imu;
// CUBES FOR THE GPS ALIGNED TRAJECTORY
{-5.473, 15.819, 0.000},
{-15.586, 25.062, 0.000},
{-22.395, 43.562, 0.000},
{-36.035, 50.009, 0.000},
{-41.113, 62.946, 0.000},
{-47.675, 62.477, 0.000},
{-56.081, 76.612, 0.000},
{-68.155, 82.382, 0.000},
{-76.903, 97.032, 0.000},
{-93.218, 103.532, 0.000},
{-121.903, 137.353, 0.000},
{-134.567, 140.840, 0.000},
{-158.030, 169.709, 0.000},
{-170.521, 177.370, 0.000},
{-225.440, 236.153, 0.000},
{-259.810, 259.846, 0.000},
{-287.220, 297.381, 0.000},
{-313.876, 312.375, 0.000},
{-446.905, 450.175, 0.000},
{-489.056, 477.187, 0.000},
{-544.828, 555.099, 0.000},
{-568.349, 556.457, 0.000},
{-638.075, 643.542, 0.000},
{-702.377, 676.867, 0.000},
{-753.507, 750.003, 0.000},
{-807.197, 768.580, 0.000},
{-863.780, 829.878, 0.000},
{-877.002, 860.185, 0.000},
{-905.568, 864.266, 0.000},
{-915.407, 887.186, 0.000},
{-931.954, 899.596, 0.000},
{-936.091, 907.870, 0.000},
{-971.656, 952.685, 0.000},
{-1012.476, 947.723, 0.000},
{-1016.629, 985.880, 0.000},
{-1065.223, 987.986, 0.000},
{-1078.723, 1012.979, 0.000},
{-1145.907, 1035.300, 0.000},
{-1234.198, 1116.285, 0.000},
{-1338.651, 1152.282, 0.000},
{-1422.556, 1202.064, 0.000},
{-1488.881, 1208.139, 0.000},
{-1613.822, 1281.229, 0.000},
{-1682.561, 1274.593, 0.000},
{-1927.827, 1384.315, 0.000},
{-2035.692, 1400.027, 0.000},
{-2224.536, 1462.204, 0.000},
{-2313.168, 1458.187, 0.000},
{-2436.409, 1496.333, 0.000},
{-2503.565, 1473.606, 0.000},
{-2605.994, 1481.901, 0.000},
{-2563.683, 1431.523, 0.000},
{-2599.753, 1395.453, 0.000},
{-2679.961, 1407.933, 0.000},
{-2630.649, 1438.753, 0.000},
{-2581.338, 1506.557, 0.000},
{-2528.483, 1197.294, 0.000},
{-2553.878, 1178.248, 0.000},
{-2553.878, 1108.412, 0.000},
{-2450.511, 1023.108, 0.000},
{-2450.511, 967.579, 0.000},
{-2437.486, 890.971, 0.000},
{-2450.511, 819.504, 0.000},
{-2365.134, 690.688, 0.000},
{-2296.590, 541.293, 0.000},
{-2283.679, 620.835, 0.000},
{-2151.879, 318.478, 0.000},
{-2176.409, 287.816, 0.000},
{-2102.820, 251.022, 0.000},
{-2127.350, 189.698, 0.000},
{-2028.905, 122.896, 0.000},
{-2046.821, 66.008, 0.000},
{-1958.025, 1.247, 0.000},
{-1886.644, -52.849, 0.000},
{-1863.596, -64.373, 0.000},
{-1783.239, -134.282, 0.000},
{-1705.906, -151.254, 0.000},
{-1617.356, -206.598, 0.000},
{-1565.032, -222.410, 0.000},
{-1465.740, -282.723, 0.000},
{-1361.237, -309.086, 0.000},
{-1276.829, -355.967, 0.000},
{-1236.832, -350.967, 0.000},
{-1170.082, -368.522, 0.000},
{-1145.877, -386.675, 0.000},
{-1133.776, -374.573, 0.000},
{-1064.985, -381.088, 0.000},
{-1060.602, -409.499, 0.000},
{-943.535, -368.153, 0.000},
{-852.780, -365.980, 0.000},
{-747.917, -375.279, 0.000},
{-694.244, -346.169, 0.000},
{-724.264, -360.724, 0.000},
{-601.465, -334.072, 0.000},
{-557.205, -336.465, 0.000},
{-564.189, -332.895, 0.000},
{-520.468, -340.201, 0.000},
{-476.908, -317.140, 0.000},
{-417.040, -318.781, 0.000},
{-377.842, -296.022, 0.000},
{-309.273, -302.223, 0.000},
{-267.572, -281.072, 0.000},
{-233.495, -287.929, 0.000},
{-211.795, -257.754, 0.000},
{-209.412, -234.544, 0.000},
{-144.978, -252.842, 0.000},
{-86.550, -255.100, 0.000},
{-57.889, -200.404, 0.000},
{-12.395, -168.373, 0.000},
{-8.988, -117.709, 0.000},
{-5.494, -73.048, 0.000},
{12.541, -24.783, 0.000},
{-8.001, 22.391, 0.000},
{-7.999, -85.193, 0.000},
{-61.402, 98.350, 0.000}
};
std::vector<std::vector<double>> cashedCubeVisPoints;
ros::Publisher pub_cubes, pub_aligned_traj, pub_sensor_left_gps;
ros::Subscriber sub_imu, subs_T_align, sub_stereo_left;
visualization_msgs::Marker Marker;
std::ofstream fT_aligned;
nav_msgs::Path curr_path;
void callback_imu(const sensor_msgs::Imu::ConstPtr &msg){
ROS_INFO_STREAM("received imu");
@@ -45,17 +298,109 @@ void callback_imu(const sensor_msgs::Imu::ConstPtr &msg){
}
// Callback for the GPS-to-VIO alignment transform.
// Every time a new T_align arrives:
//   1. Assemble the 4x4 homogeneous transform from the message's quaternion
//      (via ov_core::quat_2_Rot) and translation.
//   2. Transform the *cached* original cube points (so repeated callbacks do
//      not compound the transform) and store the result in cubeVisPoints.
//   3. Transform the accumulated trajectory and publish it on
//      pub_aligned_traj for visual inspection of the obtained R|t.
// Also appends the timestamp and the transform text to fT_aligned.
void T_align_callback(const geometry_msgs::TransformStamped &msg){
  // Guard: the cached copy must mirror cubeVisPoints. If it was never
  // populated, transforming from it would read out of bounds.
  if (cashedCubeVisPoints.size() != cubeVisPoints.size()) {
    ROS_WARN("T_align_callback: cached cube points not initialized, skipping");
    return;
  }
  const size_t pointsLen = cubeVisPoints.size();

  // Build the homogeneous transform; Identity() zero-fills the bottom row
  // (the original left T_align(3,0..2) uninitialized).
  Eigen::Matrix4d T_align = Eigen::Matrix4d::Identity();
  Eigen::Vector4d qRot;
  qRot << msg.transform.rotation.x, msg.transform.rotation.y, msg.transform.rotation.z, msg.transform.rotation.w;
  T_align.block<3,3>(0,0) = ov_core::quat_2_Rot(qRot);
  T_align(0,3) = msg.transform.translation.x;
  T_align(1,3) = msg.transform.translation.y;
  T_align(2,3) = msg.transform.translation.z;

  // Homogeneous coordinates of the cached (untransformed) cube points.
  // BUG FIX: the original indexed element [0] on both sides instead of [i].
  Eigen::MatrixXd points(pointsLen, 4);
  for (size_t i = 0; i < pointsLen; i++) {
    points(i, 0) = cashedCubeVisPoints[i][0];
    points(i, 1) = cashedCubeVisPoints[i][1];
    points(i, 2) = cashedCubeVisPoints[i][2];
    points(i, 3) = 1.0;
  }

  // Log the transform. Use an explicit "%s" format so any '%' in the matrix
  // text cannot be misinterpreted as a printf conversion specifier.
  std::stringstream T_align_output;
  T_align_output << T_align << std::endl;
  ROS_INFO("%s", T_align_output.str().c_str());
  fT_aligned << msg.header.stamp.toSec() << std::endl << T_align_output.str() << std::endl;

  points = (T_align * points.transpose()).transpose();
  for (size_t i = 0; i < pointsLen; i++) {
    cubeVisPoints[i][0] = points(i, 0);
    cubeVisPoints[i][1] = points(i, 1);
    cubeVisPoints[i][2] = points(i, 2);
  }

  // Align the trajectory and publish it.
  Eigen::MatrixXd poses(curr_path.poses.size(), 4);
  for (size_t i = 0; i < curr_path.poses.size(); i++) {
    poses(i, 0) = curr_path.poses[i].pose.position.x;
    poses(i, 1) = curr_path.poses[i].pose.position.y;
    poses(i, 2) = curr_path.poses[i].pose.position.z;
    poses(i, 3) = 1.0; // BUG FIX: homogeneous column was left uninitialized
  }
  // BUG FIX: evaluate into a concrete matrix (auto on an Eigen product is a
  // lazy expression) and actually use the aligned result when refilling —
  // the original wrote the *unaligned* poses back into curr_path.
  Eigen::MatrixXd aligned_poses = (T_align * poses.transpose()).transpose();
  for (size_t i = 0; i < curr_path.poses.size(); i++) {
    curr_path.poses[i].pose.position.x = aligned_poses(i, 0);
    curr_path.poses[i].pose.position.y = aligned_poses(i, 1);
    curr_path.poses[i].pose.position.z = aligned_poses(i, 2);
  }
  pub_aligned_traj.publish(curr_path);
}
// Forwards the raw left camera image to the GPS fusion node's input topic,
// re-tagging the TF frame so downstream consumers treat it as the
// GPS-aligned camera frame ("cam0_gps").
// NOTE(review): mutates the incoming (potentially shared) message in place
// before republishing — other subscribers of the source topic may observe
// the changed frame_id if the message is not copied by the transport.
void callback_stereo_left_gps(sensor_msgs::ImagePtr msg){
msg->header.frame_id = "cam0_gps";
pub_sensor_left_gps.publish(msg);
}
int main(int argc, char* argv[]){
ros::init(argc, argv, "render_cubes");
ros::NodeHandle nh;
fT_aligned.open("T_aligned_history.txt");
if (!fT_aligned.is_open()){
ROS_INFO("error opening file!");
}
else{
ROS_INFO("file open success!");
std::string curr_path = std::filesystem::current_path();
std::string output_str = "current path is: " + curr_path;
ROS_INFO(output_str.c_str());
}
// the idea: set the subscriber for IMU, and every time the IMU measurement comes, publish
// the cubes because the images are also publish when the IMU measurement arrives.
// the cubes because the images are also published when the IMU measurement arrives.
sub_imu = nh.subscribe("/imu/data_raw", 1000, &callback_imu);
pub_cubes = nh.advertise<visualization_msgs::Marker>("/ov_msckf/vis_markers", 0);
pub_aligned_traj = nh.advertise<nav_msgs::Path>("/ov_msckf/pathaligned", 2);
Marker.header.frame_id = "global";
// workaround: create the subscriber here which forwards messages from /stereo/left/image_raw to /global_fusion_node/sensor_left
sub_stereo_left = nh.subscribe("/stereo/left/image_raw", 1000, &callback_stereo_left_gps);
pub_sensor_left_gps = nh.advertise<sensor_msgs::Image>("/global_fusion_node/sensor_left", 2);
//// UNCOMMENT THIS TO RESTORE THE T_align_callback
// subs_T_align = nh.subscribe("/ov_msckf/T_align", 1000, T_align_callback);
Marker.header.frame_id = "gps_global";
Marker.header.stamp = ros::Time();
Marker.ns = "my_namespace";
Marker.id = 0;

View File

@@ -93,9 +93,6 @@ ROS1Visualizer::ROS1Visualizer(std::shared_ptr<ros::NodeHandle> nh, std::shared_
}
fT_aligned << "ts mT_aligned" << std::endl;
//// UNCOMMENT THIS TO RESTORE THE T_align_callback
// subs_T_align = nh->subscribe("/ov_msckf/T_align", 1000, &ROS1Visualizer::T_align_callback, this);
std::ifstream fileTraj;
fileTraj.open("/home/pi/workspace/catkin_ws_ov/src/open_vins/ov_data/merged.txt");
Eigen::Matrix<double, 7, 1> extractedValues;
@@ -136,6 +133,9 @@ ROS1Visualizer::ROS1Visualizer(std::shared_ptr<ros::NodeHandle> nh, std::shared_
pub_camera_info = nh->advertise<sensor_msgs::CameraInfo>("/ov_msckf/camera_info", 2);
pub_cam0_info = nh->advertise<sensor_msgs::CameraInfo>("/ov_msckf/cam0_info", 2);
pub_gps_cam0_info = nh->advertise<sensor_msgs::CameraInfo>("/global_fusion_node/camera_info", 2);
//sensor_left_gps_pub = it.advertise("/global_fusion_node/sensor_left", 2);
//// UNCOMMENT THIS IN ORDER TO RESTORE THE CUBES RENDERING
// vis_pub = nh->advertise<visualization_msgs::Marker>("/ov_msckf/vis_markers", 0);
// pub_camera_info = nh->advertise<sensor_msgs::CameraInfo>("/ov_mskcf/camera_info", 2);
@@ -755,6 +755,9 @@ void ROS1Visualizer::publish_images() {
// sensor_msgs::ImagePtr sensor_left_msg = cv_bridge::CvImage(header, "bgr8", cropped_image).toImageMsg();
// sensor_left_msg->header.seq = sensor_left_id++;
sensor_left_pub.publish(original_img_message);
original_img_message->header.frame_id = "cam0_gps";
sensor_left_gps_pub.publish(original_img_message);
ROS_INFO("Image publish success!");
bool is_fisheye = (std::dynamic_pointer_cast<ov_core::CamEqui>(_app->get_params().camera_intrinsics.at(0)) != nullptr);
@@ -783,6 +786,15 @@ void ROS1Visualizer::publish_images() {
camInfoGlob.width = exl_msg->width / 2;
pub_cam0_info.publish(camInfoGlob);
// publish caminfo for GPS
auto camInfoGPS = sensor_msgs::CameraInfo();
camInfoGPS = cameraparams;
camInfoGPS.header.frame_id = "cam0_gps";
camInfoGPS.header.stamp = orig_img_msg_header.stamp;
camInfoGPS.height = exl_msg->height;
camInfoGPS.width = exl_msg->width / 2;
pub_gps_cam0_info.publish(camInfoGPS);
//// UNCOMMENT THIS TO RESTORE THE CUBES PUBLISHING
// for (auto & cubeVisPoint : cubeVisPoints){
// geometry_msgs::Point tempPoint;
@@ -1041,33 +1053,33 @@ void ROS1Visualizer::publish_loopclosure_information() {
pub_loop_intrinsics.publish(cameraparams);
// Added by PI
int helllo = 0;
// pub_camera_info.publish(cameraparams);
// pub_camera_info_trackhist.publish(cameraparams);
// {cparams(0), 0, cparams(2), 0, 0, cparams(1), cparams(3), 0, 0, 0, 1}
cameraparams.P = {cparams(0), 0, cparams(2), 0, 0, cparams(1), cparams(3), 0, 0, 0, 1, 0};
std::cout << "Testing message" << std::endl;
std::cout << "cameraparams.P: " << cameraparams.P.data() << std::endl;
PRINT_DEBUG("cameraparams.P: %d", cameraparams.P.data());
sensor_msgs::CameraInfoPtr cameraParamsPtr;
cameraParamsPtr->header = cameraparams.header;
cameraParamsPtr->D = cameraparams.D;
cameraParamsPtr->K = cameraparams.K;
cameraParamsPtr->P = cameraparams.P;
cameraParamsPtr->R = cameraparams.R;
cameraParamsPtr->distortion_model = cameraparams.distortion_model;
cameraParamsPtr->binning_x = cameraparams.binning_x;
cameraParamsPtr->binning_y = cameraparams.binning_y;
cameraParamsPtr->height = cameraparams.height;
cameraParamsPtr->roi = cameraparams.roi;
cameraParamsPtr->width = cameraparams.width;
camInfoGlob = sensor_msgs::CameraInfo();
camInfoPtrGlob = cameraParamsPtr;
camInfoGlob = cameraparams;
// camPub.publish(exl_msg_global, cameraparams);
camPub.publish(exl_msg_global, cameraParamsPtr);
// int helllo = 0;
//// pub_camera_info.publish(cameraparams);
//// pub_camera_info_trackhist.publish(cameraparams);
//// {cparams(0), 0, cparams(2), 0, 0, cparams(1), cparams(3), 0, 0, 0, 1}
// cameraparams.P = {cparams(0), 0, cparams(2), 0, 0, cparams(1), cparams(3), 0, 0, 0, 1, 0};
// std::cout << "Testing message" << std::endl;
// std::cout << "cameraparams.P: " << cameraparams.P.data() << std::endl;
// PRINT_DEBUG("cameraparams.P: %d", cameraparams.P.data());
//
// sensor_msgs::CameraInfoPtr cameraParamsPtr;
// cameraParamsPtr->header = cameraparams.header;
// cameraParamsPtr->D = cameraparams.D;
// cameraParamsPtr->K = cameraparams.K;
// cameraParamsPtr->P = cameraparams.P;
// cameraParamsPtr->R = cameraparams.R;
// cameraParamsPtr->distortion_model = cameraparams.distortion_model;
// cameraParamsPtr->binning_x = cameraparams.binning_x;
// cameraParamsPtr->binning_y = cameraparams.binning_y;
// cameraParamsPtr->height = cameraparams.height;
// cameraParamsPtr->roi = cameraparams.roi;
// cameraParamsPtr->width = cameraparams.width;
//
// camInfoGlob = sensor_msgs::CameraInfo();
// camInfoPtrGlob = cameraParamsPtr;
// camInfoGlob = cameraparams;
//// camPub.publish(exl_msg_global, cameraparams);
// camPub.publish(exl_msg_global, cameraParamsPtr);
}
//======================================================
@@ -1171,70 +1183,3 @@ void ROS1Visualizer::publish_loopclosure_information() {
}
}
// Callback for the GPS-to-VIO alignment transform (visualizer member version).
// Mirrors the standalone render_cubes callback: builds the 4x4 homogeneous
// T_align from the message, transforms the *cached* original cube points into
// cubeVisPoints (so repeated callbacks do not compound the transform), logs
// the transform to fT_aligned keyed by last_visualization_timestamp, and
// publishes the GPS-aligned trajectory on pub_aligned_traj.
void ROS1Visualizer::T_align_callback(const geometry_msgs::TransformStamped &msg){
  // Guard: the cached copy must mirror cubeVisPoints. If it was never
  // populated, transforming from it would read out of bounds.
  if (cashedCubeVisPoints.size() != cubeVisPoints.size()) {
    ROS_WARN("T_align_callback: cached cube points not initialized, skipping");
    return;
  }
  const size_t pointsLen = cubeVisPoints.size();

  // Build the homogeneous transform; Identity() zero-fills the bottom row
  // (the original left T_align(3,0..2) uninitialized).
  Eigen::Matrix4d T_align = Eigen::Matrix4d::Identity();
  Eigen::Vector4d qRot;
  qRot << msg.transform.rotation.x, msg.transform.rotation.y, msg.transform.rotation.z, msg.transform.rotation.w;
  T_align.block<3,3>(0,0) = ov_core::quat_2_Rot(qRot);
  T_align(0,3) = msg.transform.translation.x;
  T_align(1,3) = msg.transform.translation.y;
  T_align(2,3) = msg.transform.translation.z;

  // Homogeneous coordinates of the cached (untransformed) cube points.
  // BUG FIX: the original indexed element [0] on both sides instead of [i].
  Eigen::MatrixXd points(pointsLen, 4);
  for (size_t i = 0; i < pointsLen; i++) {
    points(i, 0) = cashedCubeVisPoints[i][0];
    points(i, 1) = cashedCubeVisPoints[i][1];
    points(i, 2) = cashedCubeVisPoints[i][2];
    points(i, 3) = 1.0;
  }

  // Log the transform. Use an explicit "%s" format so any '%' in the matrix
  // text cannot be misinterpreted as a printf conversion specifier.
  std::stringstream T_align_output;
  T_align_output << T_align << std::endl;
  ROS_INFO("%s", T_align_output.str().c_str());
  fT_aligned << last_visualization_timestamp << std::endl << T_align_output.str() << std::endl;

  points = (T_align * points.transpose()).transpose();
  for (size_t i = 0; i < pointsLen; i++) {
    cubeVisPoints[i][0] = points(i, 0);
    cubeVisPoints[i][1] = points(i, 1);
    cubeVisPoints[i][2] = points(i, 2);
  }

  // Align the trajectory and publish it.
  Eigen::MatrixXd poses(curr_path.poses.size(), 4);
  for (size_t i = 0; i < curr_path.poses.size(); i++) {
    poses(i, 0) = curr_path.poses[i].pose.position.x;
    poses(i, 1) = curr_path.poses[i].pose.position.y;
    poses(i, 2) = curr_path.poses[i].pose.position.z;
    poses(i, 3) = 1.0; // BUG FIX: homogeneous column was left uninitialized
  }
  // BUG FIX: evaluate into a concrete matrix (auto on an Eigen product is a
  // lazy expression) and actually use the aligned result when refilling —
  // the original wrote the *unaligned* poses back into curr_path.
  Eigen::MatrixXd aligned_poses = (T_align * poses.transpose()).transpose();
  for (size_t i = 0; i < curr_path.poses.size(); i++) {
    curr_path.poses[i].pose.position.x = aligned_poses(i, 0);
    curr_path.poses[i].pose.position.y = aligned_poses(i, 1);
    curr_path.poses[i].pose.position.z = aligned_poses(i, 2);
  }
  pub_aligned_traj.publish(curr_path);
}

View File

@@ -163,12 +163,12 @@ protected:
// Added by PI on 27.07.2022
// image_transport::CameraPublisher pub_camera_info, pub_camera_info_trackhist;
image_transport::CameraPublisher camPub;
image_transport::Publisher sensor_left_pub;
image_transport::Publisher sensor_left_pub, sensor_left_gps_pub;
visualization_msgs::Marker Marker;
std::vector<std::vector<double>> cubeVisPoints, cashedCubeVisPoints;
image_transport::CameraSubscriber camSub;
sensor_msgs::CameraInfoPtr camInfoPtrGlob;
ros::Publisher pub_camera_info, pub_cam0_info;
ros::Publisher pub_camera_info, pub_cam0_info, pub_gps_cam0_info;
ros::Publisher vis_pub;
sensor_msgs::CameraInfo camInfoGlob;
sensor_msgs::ImagePtr exl_msg_global;