created rosnode to render images

This commit is contained in:
admin1
2022-08-15 09:54:33 +03:00
parent fd031c7ecf
commit 1c81d558a8
8 changed files with 277 additions and 67 deletions

View File

@@ -3,11 +3,11 @@
<!-- what ros bag to play --> <!-- what ros bag to play -->
<arg name="bag_name" default="merged" /> <arg name="bag_name" default="merged" />
<arg name="bag_path" default="/home/pi/work_drivecast/datasets/complex_urban_dataset/urban32-yeouido/bag2" /> <arg name="bag_path" default="/home/admin1/podmivan/datasets/complex-urban/urban32-yeouido/bag" />
<!-- where to save the recorded poses --> <!-- where to save the recorded poses -->
<arg name="path_save" default="/home/pi/workspace/catkin_ws_ov/src/open_vins/ov_data/" /> <arg name="path_save" default="/home/admin1/workspace/catkin_ws_ov/src/open_vins/ov_data/" />
<!-- record the trajectory --> <!-- record the trajectory -->
@@ -20,7 +20,7 @@
<node name="live_align_trajectory" pkg="ov_eval" type="live_align_trajectory" output="screen" required="true"> <node name="live_align_trajectory" pkg="ov_eval" type="live_align_trajectory" output="screen" required="true">
<param name="alignment_type" type="str" value="posyaw" /> <param name="alignment_type" type="str" value="posyaw" />
<param name="path_gt" type="str" value="/home/pi/workspace/catkin_ws_ov/src/open_vins/ov_data/merged.txt" /> <param name="path_gt" type="str" value="/home/admin1/workspace/catkin_ws_ov/src/open_vins/ov_data/merged.txt" />
<param name="verbosity" type="str" value="DEBUG" /> <param name="verbosity" type="str" value="DEBUG" />
</node> </node>

View File

@@ -10,7 +10,7 @@ if (NOT OpenCV_FOUND)
endif () endif ()
find_package(Boost REQUIRED COMPONENTS system filesystem thread date_time) find_package(Boost REQUIRED COMPONENTS system filesystem thread date_time)
find_package(Ceres REQUIRED) find_package(Ceres REQUIRED)
include_directories(/home/pi/work_drivecast/slams/ORB-SLAM3_Linux/Thirdparty/Sophus) include_directories(/home/admin1/podmivan/SLAM/ORB_SLAM3/Thirdparty/Sophus)
message(STATUS "OPENCV: " ${OpenCV_VERSION} " | BOOST: " ${Boost_VERSION} " | CERES: " ${Ceres_VERSION}) message(STATUS "OPENCV: " ${OpenCV_VERSION} " | BOOST: " ${Boost_VERSION} " | CERES: " ${Ceres_VERSION})
# If we will compile with aruco support # If we will compile with aruco support

View File

@@ -126,6 +126,13 @@ if (catkin_FOUND AND ENABLE_ROS)
RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
) )
add_executable(image_render_combine src/imrender_combine.cpp)
target_link_libraries(image_render_combine ov_msckf_lib ${thirdparty_libraries})
install(TARGETS image_render_combine
        ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
        LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
        # Fixed: was `$[CATKIN_PACKAGE_BIN_DESTINATION}` (mismatched bracket),
        # which CMake treats as a literal string instead of expanding the variable,
        # installing the binary into a bogus directory.
        RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
endif () endif ()
add_executable(run_simulation src/run_simulation.cpp) add_executable(run_simulation src/run_simulation.cpp)

View File

@@ -52,6 +52,22 @@ Visualization Manager:
Plane Cell Count: 80 Plane Cell Count: 80
Reference Frame: <Fixed Frame> Reference Frame: <Fixed Frame>
Value: true Value: true
- Class: rviz_camera_stream/CameraPub
Enabled: true
Frame Rate: 10
Image Topic: /ov_msckf/rendered
Camera Info Topic: /ov_msckf/camera0_info
Name: CameraPub1
Queue Size: 2
Value: true
Visibility:
Axes: true
CameraPub2: true
Grid: true
Image1: true
Image2: true
TF: true
Value: true
- Class: rviz/Image - Class: rviz/Image
Enabled: true Enabled: true
Image Topic: /ov_msckf/trackhist Image Topic: /ov_msckf/trackhist
@@ -61,10 +77,9 @@ Visualization Manager:
Name: Image Tracks Name: Image Tracks
Normalize Range: true Normalize Range: true
Queue Size: 2 Queue Size: 2
Transport Hint: compressed Transport Hint: raw
Unreliable: false Unreliable: false
Value: true Value: true
Camera Info Topic: /ov_msckf/camera_info
- Class: rviz/Image - Class: rviz/Image
Enabled: true Enabled: true
Image Topic: /ov_msckf/loop_depth_colored Image Topic: /ov_msckf/loop_depth_colored
@@ -77,7 +92,6 @@ Visualization Manager:
Transport Hint: compressed Transport Hint: compressed
Unreliable: false Unreliable: false
Value: true Value: true
- Camera Info Topic: /ov_msckf/camera_info
- Alpha: 1 - Alpha: 1
Buffer Length: 1 Buffer Length: 1
Class: rviz/Path Class: rviz/Path
@@ -291,6 +305,14 @@ Visualization Manager:
TF: true TF: true
Value: true Value: true
Zoom Factor: 1 Zoom Factor: 1
- Class: rviz/Marker
Enabled: true
Marker Topic: /ov_msckf/vis_markers
Name: Marker
Namespaces:
{}
Queue Size: 100
Value: true
Enabled: true Enabled: true
Global Options: Global Options:
Background Color: 44; 44; 44 Background Color: 44; 44; 44

View File

@@ -16,8 +16,8 @@
<!-- <arg name="bag" default="/datasets/$(arg config)/$(arg dataset).bag" />--> <!-- <arg name="bag" default="/datasets/$(arg config)/$(arg dataset).bag" />-->
<!-- saving trajectory path and timing information --> <!-- saving trajectory path and timing information -->
<arg name="dosave" default="false" /> <arg name="dosave" default="true" />
<arg name="dotime" default="false" /> <arg name="dotime" default="true" />
<arg name="path_est" default="/tmp/traj_estimate.txt" /> <arg name="path_est" default="/tmp/traj_estimate.txt" />
<arg name="path_time" default="/tmp/traj_timing.txt" /> <arg name="path_time" default="/tmp/traj_timing.txt" />
@@ -28,6 +28,7 @@
<!-- MASTER NODE! --> <!-- MASTER NODE! -->
<!-- <node name="run_subscribe_msckf" pkg="ov_msckf" type="run_subscribe_msckf" output="screen" clear_params="true" required="true" launch-prefix="gdb -ex run &#45;&#45;args">--> <!-- <node name="run_subscribe_msckf" pkg="ov_msckf" type="run_subscribe_msckf" output="screen" clear_params="true" required="true" launch-prefix="gdb -ex run &#45;&#45;args">-->
<node name="image_render_combine" pkg="ov_msckf" type="image_render_combine" output="screen" clear_params="true" required="true"> </node>
<node name="run_subscribe_msckf" pkg="ov_msckf" type="run_subscribe_msckf" output="screen" clear_params="true" required="true"> <node name="run_subscribe_msckf" pkg="ov_msckf" type="run_subscribe_msckf" output="screen" clear_params="true" required="true">
<!-- master configuration object --> <!-- master configuration object -->
@@ -66,5 +67,22 @@
</node> </node>
</group> </group>
<group ns="camera1">
    <!-- <node pkg="tf" type="static_transform_publisher" name="camera_broadcaster"
        args="0.1 0.1 -0.5 0 0 0 1 map camera1 10" /> -->
    <!-- Publishes a static CameraInfo for the render camera at 2 Hz.
         Fixed: 'cam0' and 'plumb_bob' were re-quoted inside the already
         single-quoted YAML dict, which corrupts the YAML parsing of the
         rostopic argument. Bare scalars need no quotes here.
         NOTE(review): plumb_bob normally carries 5 distortion coefficients
         (k1 k2 t1 t2 k3) but only 3 are listed - confirm against the source
         calibration before consumers index D[3]/D[4]. -->
    <node name="camera_info" pkg="rostopic" type="rostopic"
        args="pub camera_info sensor_msgs/CameraInfo
        '{header: {seq: 0, stamp: {secs: 0, nsecs: 0}, frame_id: cam0},
        height: 480, width: 640, distortion_model: plumb_bob,
        D: [-5.6143027800000002e-02, 1.3952563200000001e-01, -1.2155906999999999e-03],
        K: [500.0, 0.0, 320, 0.0, 500.0, 240.0, 0.0, 0.0, 1.0],
        R: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0],
        P: [500.0, 0.0, 320, 0.0, 0.0, 500, 240, 0.0, 0.0, 0.0, 1.0, 0.0],
        binning_x: 0, binning_y: 0,
        roi: {x_offset: 0, y_offset: 0, height: 480, width: 640, do_rectify: false}}' -r 2"
        output="screen"/>
</group>
</launch> </launch>

View File

@@ -0,0 +1,117 @@
//
// Created by Podmogilnyi Ivan on 11.08.2022.
//
#include <ros/ros.h>
#include <sensor_msgs/Image.h>
#include <cv_bridge/cv_bridge.h>
#include <opencv2/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
std::vector<sensor_msgs::Image> sensor_images;
ros::Publisher pub_res, pub_mask, pub_recv_curr_image;
/// Combine a rendered overlay with the sensor frame that shares its header
/// sequence number: every non-black pixel of the rendered image replaces the
/// corresponding pixel of the raw frame. Publishes the combined image on
/// pub_res and the binary mask on pub_mask, and dumps the result as a PNG.
void rendered_image_callback(sensor_msgs::Image msg){
  cv_bridge::CvImagePtr bridge_img = cv_bridge::toCvCopy(msg, "bgr8");
  cv::Mat image = bridge_img->image;
  ROS_INFO_STREAM("The image encoding is: " << bridge_img->encoding);

  // Grayscale copy of the render acts as a "content present" mask: any
  // non-zero pixel of the render is copied over the sensor frame below.
  cv::Mat mask = image.clone();
  cv::cvtColor(mask, mask, cv::COLOR_BGR2GRAY);

  ROS_INFO_STREAM("The message sequence number: " << msg.header.seq);
  // Fixed: the original dereferenced sensor_images[size()-1] without an
  // emptiness check (undefined behaviour when the render arrives first).
  if (!sensor_images.empty()) {
    ROS_INFO_STREAM("The latest sensor sequence number: " << sensor_images.back().header.seq);
  }

  // Locate the buffered sensor image with the matching sequence number.
  // Fixed: the original compared the vector *index* against msg.header.seq
  // (and skipped index 0), which silently fails as soon as one message is
  // dropped or sequence numbers do not start at 0. Match the stored headers.
  cv::Mat current_sensor_image;
  bool found_flag = false;
  for (std::size_t i = sensor_images.size(); i-- > 0;) {
    if (sensor_images[i].header.seq == msg.header.seq) {
      cv_bridge::CvImagePtr bridge_sensor_img = cv_bridge::toCvCopy(sensor_images[i], "bgr8");
      current_sensor_image = bridge_sensor_img->image;
      found_flag = true;
      break;
    }
  }

  if (found_flag) {
    // Guard the per-pixel copy: mismatched sizes would make .at() read/write
    // out of bounds (the original only logged the sizes).
    if (current_sensor_image.size() != image.size()) {
      ROS_ERROR_STREAM("Size mismatch: rendered " << image.cols << "x" << image.rows
                       << " vs sensor " << current_sensor_image.cols << "x"
                       << current_sensor_image.rows << " - skipping frame.");
      return;
    }
    // Start from the raw sensor frame and overwrite it wherever the render
    // produced a non-black pixel.
    cv::Mat masked = current_sensor_image.clone();
    for (int r = 0; r < mask.rows; r++) {
      for (int c = 0; c < mask.cols; c++) {
        if (mask.at<uchar>(r, c) != 0) {
          masked.at<cv::Vec3b>(r, c) = image.at<cv::Vec3b>(r, c);
        }
      }
    }

    // Dump the combined frame for offline inspection (same behaviour as before).
    std::string fn = std::to_string(msg.header.stamp.toSec()) + "_cam0.png";
    cv::imwrite(fn, masked);

    std_msgs::Header header;
    header.stamp = msg.header.stamp;
    header.frame_id = "cam0";
    sensor_msgs::ImagePtr masked_msg = cv_bridge::CvImage(header, "bgr8", masked).toImageMsg();
    sensor_msgs::ImagePtr mask_msg = cv_bridge::CvImage(header, "mono8", mask).toImageMsg();
    pub_res.publish(*masked_msg);
    pub_mask.publish(*mask_msg);
  }
  else{
    ROS_INFO("Not found an image with the appropriate timestamp.");
  }
}
/// Buffer every incoming raw sensor frame so rendered_image_callback can
/// later pair a rendered overlay with the frame sharing its sequence number.
/// NOTE(review): the buffer grows without bound for the node's lifetime -
/// consider pruning frames that can no longer be matched.
void sensor_image_callback(sensor_msgs::Image msg){
  sensor_images.push_back(msg);
  ROS_INFO_STREAM("The sensor_images size is: " << sensor_images.size());
  ROS_INFO_STREAM("The sensor_image seq number is: " << msg.header.seq);
}
int main(int argc, char* argv[]){
ros::init(argc, argv, "image_render_combine");
ros::NodeHandle nh;
sensor_images = {};
ROS_INFO("Hello world!");
ros::Subscriber sub_rend_image, sub_sensor_image;
sub_sensor_image = nh.subscribe("/stereo/left/image_raw", 1000, &sensor_image_callback);
sub_rend_image = nh.subscribe("/ov_msckf/rendered", 1000, &rendered_image_callback);
pub_res = nh.advertise<sensor_msgs::Image>("/ov_msckf/combined", 2);
pub_mask = nh.advertise<sensor_msgs::Image>("/ov_msckf/mask", 2);
pub_recv_curr_image = nh.advertise<sensor_msgs::Image>("/ov_msckf/recv_curr_image", 2);
ros::spin();
}

View File

@@ -57,6 +57,7 @@ ROS1Visualizer::ROS1Visualizer(std::shared_ptr<ros::NodeHandle> nh, std::shared_
// Our tracking image // Our tracking image
it_pub_tracks = it.advertise("/ov_msckf/trackhist", 2); it_pub_tracks = it.advertise("/ov_msckf/trackhist", 2);
sensor_left_pub = it.advertise("/ov_msckf/sensor_left", 2);
PRINT_DEBUG("Publishing: %s\n", it_pub_tracks.getTopic().c_str()); PRINT_DEBUG("Publishing: %s\n", it_pub_tracks.getTopic().c_str());
// Groundtruth publishers // Groundtruth publishers
@@ -75,6 +76,23 @@ ROS1Visualizer::ROS1Visualizer(std::shared_ptr<ros::NodeHandle> nh, std::shared_
// Added by PI on 27.07.2022 // Added by PI on 27.07.2022
// В какой топик публикуется ? // В какой топик публикуется ?
sensor_left_id = 0;
pub_aligned_traj = nh->advertise<nav_msgs::Path>("/ov_msckf/pathaligned", 2);
// Log every received T_align (see T_align_callback) to a file in the cwd.
fT_aligned.open("T_aligned_history.txt");
if (!fT_aligned.is_open()){
  ROS_INFO("error opening file!");
}
else{
  ROS_INFO("file open success!");
  // Fixed: ROS_INFO is printf-style, so passing a runtime string as the
  // format argument misinterprets any '%' in the path. Also renamed the
  // local, which shadowed the `curr_path` member (nav_msgs::Path).
  std::string cwd = std::filesystem::current_path();
  ROS_INFO("current path is: %s", cwd.c_str());
  // Only write the column header when the file actually opened.
  fT_aligned << "ts mT_aligned" << std::endl;
}
subs_T_align = nh->subscribe("/ov_msckf/T_align", 1000, &ROS1Visualizer::T_align_callback, this); subs_T_align = nh->subscribe("/ov_msckf/T_align", 1000, &ROS1Visualizer::T_align_callback, this);
std::ifstream fileTraj; std::ifstream fileTraj;
@@ -116,6 +134,7 @@ ROS1Visualizer::ROS1Visualizer(std::shared_ptr<ros::NodeHandle> nh, std::shared_
// } // }
pub_camera_info = nh->advertise<sensor_msgs::CameraInfo>("/ov_msckf/camera_info", 2); pub_camera_info = nh->advertise<sensor_msgs::CameraInfo>("/ov_msckf/camera_info", 2);
pub_cam0_info = nh->advertise<sensor_msgs::CameraInfo>("/ov_msckf/cam0_info", 2);
vis_pub = nh->advertise<visualization_msgs::Marker>("/ov_msckf/vis_markers", 0); vis_pub = nh->advertise<visualization_msgs::Marker>("/ov_msckf/vis_markers", 0);
// pub_camera_info = nh->advertise<sensor_msgs::CameraInfo>("/ov_mskcf/camera_info", 2); // pub_camera_info = nh->advertise<sensor_msgs::CameraInfo>("/ov_mskcf/camera_info", 2);
// pub_camera_info_trackhist = nh->advertise<sensor_msgs::CameraInfo>("/ov_msckf/trackhist", 2); // pub_camera_info_trackhist = nh->advertise<sensor_msgs::CameraInfo>("/ov_msckf/trackhist", 2);
@@ -149,60 +168,35 @@ ROS1Visualizer::ROS1Visualizer(std::shared_ptr<ros::NodeHandle> nh, std::shared_
// {240, 90, 0.02}, {80, 110, 0.02}, // {240, 90, 0.02}, {80, 110, 0.02},
// {280, 110, 0.02}, {100, 130, 0.02}, // {280, 110, 0.02}, {100, 130, 0.02},
// {320, 130, 0.02}, {320, 150, 0.02}}; // {320, 130, 0.02}, {320, 150, 0.02}};
float yCubesVisPos = -3.0; float yCubesVisPos = -6.0;
float skew = 5.0; float skew = 5.0;
cubeVisPoints = { cubeVisPoints = {
{-33.451, 231.074, yCubesVisPos}, {71.215, 32.694, yCubesVisPos},
{-74.510, 141.228, yCubesVisPos}, // {81.668, 26.227, yCubesVisPos},
{-114.119, -45.468, yCubesVisPos}, {84.455, 41.731, yCubesVisPos},
{96.947, -52.157, yCubesVisPos}, // {90.726, 32.825, yCubesVisPos},
{-183.654, 605.40, yCubesVisPos}, {109.541, 60.415, yCubesVisPos},
{228.287, 146.678, yCubesVisPos}, // {149.264, 69.490, yCubesVisPos},
{363.666, 1769.109, yCubesVisPos}, {150.645, 89.474, yCubesVisPos},
{366.259, 1810.029, yCubesVisPos}, // {198.294, 97.113, yCubesVisPos},
{241.346, 186.492, yCubesVisPos}, {251.391, 154.108, yCubesVisPos},
{424.114, 1789.749, yCubesVisPos}, // {310.030, 190.208, yCubesVisPos},
{588.224, 322.999, yCubesVisPos}, {330.554, 183.244, yCubesVisPos},
{616.916, 1919.684, yCubesVisPos}, {746.086, 450.563, yCubesVisPos-1},
{845.480, 518.359, yCubesVisPos}, // {792.608, 527.140, yCubesVisPos},
{1027.410, 713.207, yCubesVisPos}, {883.669, 517.444, yCubesVisPos-1},
{1027.410, 733.207, yCubesVisPos}, {957.811, 652.638, yCubesVisPos-1},
{1101.795, 667.008, yCubesVisPos}, {1086.045, 669.317, yCubesVisPos-2},
{1100.890, 778.080, yCubesVisPos}, {1125.310, 728.214, yCubesVisPos-2},
{1182.285, 788.335, yCubesVisPos}, {731.996, 425.021, yCubesVisPos-1},
{1593.721, 1111.198, yCubesVisPos},
{1635.912, 1542.791, yCubesVisPos},
{1936.860, 1898.863, yCubesVisPos},
{1865.827, 2190.633, yCubesVisPos},
{1603.437, 2262.750, yCubesVisPos},
// {307.369, 150.830, yCubesVisPos}, {1186.527, 700.189, yCubesVisPos-2},
// {336.567, 223.124, yCubesVisPos}, // {1205.432, 681.457, yCubesVisPos},
// {406.839, 224.973, yCubesVisPos}, {1184.375, 735.604, yCubesVisPos-2},
// {473.202, 296.579, yCubesVisPos}, // {1232.697, 715.647, yCubesVisPos},
// {663.727, 382.915, yCubesVisPos}, {1244.767, 784.257, yCubesVisPos-2}
// {706.092, 446.382, yCubesVisPos},
// {773.465, 464.975, yCubesVisPos},
// {815.571, 524.754, yCubesVisPos},
// {862.600, 560.769, yCubesVisPos},
// {987.206, 590.868, yCubesVisPos},
// {1018.318, 661.753, yCubesVisPos}
{239.974, 72.267, yCubesVisPos},
{285.534, 147.012, yCubesVisPos},
{379.105, 103.735, yCubesVisPos},
{377.834, 190.505, yCubesVisPos},
{589.754, 207.650, yCubesVisPos},
{583.577, 299.558, yCubesVisPos},
{695.422, 271.515, yCubesVisPos},
{683.414, 342.212, yCubesVisPos},
{803.130, 295.558, yCubesVisPos},
{901.909, 445.780, yCubesVisPos},
{104.461, 397.006, yCubesVisPos},
{110.683, 534.233, yCubesVisPos},
{119.329, 502.730, yCubesVisPos},
{238.588, 137.661, yCubesVisPos}
}; };
cashedCubeVisPoints = cubeVisPoints;
// End of added by PI // End of added by PI
// option `to enable publishing of global to IMU transformation // option `to enable publishing of global to IMU transformation
@@ -696,6 +690,9 @@ void ROS1Visualizer::publish_state() {
for (size_t i = 0; i < poses_imu.size(); i += std::floor((double)poses_imu.size() / 16384.0) + 1) { for (size_t i = 0; i < poses_imu.size(); i += std::floor((double)poses_imu.size() / 16384.0) + 1) {
arrIMU.poses.push_back(poses_imu.at(i)); arrIMU.poses.push_back(poses_imu.at(i));
} }
// Changed by PI
curr_path = arrIMU;
pub_pathimu.publish(arrIMU); pub_pathimu.publish(arrIMU);
// Move them forward in time // Move them forward in time
@@ -721,22 +718,38 @@ void ROS1Visualizer::publish_images() {
// Publish // Publish
it_pub_tracks.publish(exl_msg); it_pub_tracks.publish(exl_msg);
// publish the left image with the visualizations
cv::Range rows(0, img_history.rows);
cv::Range cols(0, img_history.cols / 2);
cv::Mat cropped_image = img_history(rows, cols);
sensor_msgs::ImagePtr sensor_left_msg = cv_bridge::CvImage(header, "bgr8", cropped_image).toImageMsg();
sensor_left_msg->header.seq = sensor_left_id++;
sensor_left_pub.publish(sensor_left_msg);
ROS_INFO("Image publish success!");
bool is_fisheye = (std::dynamic_pointer_cast<ov_core::CamEqui>(_app->get_params().camera_intrinsics.at(0)) != nullptr); bool is_fisheye = (std::dynamic_pointer_cast<ov_core::CamEqui>(_app->get_params().camera_intrinsics.at(0)) != nullptr);
sensor_msgs::CameraInfo cameraparams; sensor_msgs::CameraInfo cameraparams;
cameraparams.header = header; cameraparams.header = header;
cameraparams.header.frame_id = "cam0"; cameraparams.header.frame_id = "cam0";
cameraparams.distortion_model = is_fisheye ? "equidistant" : "plumb_bob"; cameraparams.distortion_model = is_fisheye ? "equidistant" : "plumb_bob";
Eigen::VectorXd cparams = _app->get_state()->_cam_intrinsics.at(0)->value(); Eigen::VectorXd cparams = _app->get_state()->_cam_intrinsics.at(0)->value();
// Fixed: never pass a runtime string as the printf-style format argument -
// a stray '%' in the formatted values would be read as a conversion spec.
std::stringstream ss;
ss << cparams;
ROS_INFO("cparams are: %s", ss.str().c_str());
cameraparams.D = {cparams(4), cparams(5), cparams(6), cparams(7)}; cameraparams.D = {cparams(4), cparams(5), cparams(6), cparams(7)};
cameraparams.K = {cparams(0), 0, cparams(2), 0, cparams(1), cparams(3), 0, 0, 1}; cameraparams.K = {cparams(0), 0, cparams(2), 0, cparams(1), cparams(3), 0, 0, 1};
cameraparams.P = {cparams(0), 0, cparams(2), 0, 0, cparams(1), cparams(3), 0, 0, 0, 1, 0}; cameraparams.P = {cparams(0), 0, cparams(2), 0, 0, cparams(1), cparams(3), 0, 0, 0, 1, 0};
PRINT_DEBUG("cameraparams.P: %d", cameraparams.P.data()); PRINT_DEBUG("cameraparams.P: %d", cameraparams.P.data());
// Fixed: %d with a pointer argument is undefined behaviour; print the
// buffer address with %p (the original intent appears to be a debug trace).
ROS_INFO("cameraparams.P: %p", static_cast<const void*>(cameraparams.P.data()));
camInfoGlob = sensor_msgs::CameraInfo(); camInfoGlob = sensor_msgs::CameraInfo();
camInfoGlob = cameraparams; camInfoGlob = cameraparams;
// Added by PI // Added by PI
pub_camera_info.publish(camInfoGlob); pub_camera_info.publish(camInfoGlob);
camInfoGlob.height = exl_msg->height;
camInfoGlob.width = exl_msg->width / 2;
pub_cam0_info.publish(camInfoGlob);
for (auto & cubeVisPoint : cubeVisPoints){ for (auto & cubeVisPoint : cubeVisPoints){
geometry_msgs::Point tempPoint; geometry_msgs::Point tempPoint;
@@ -1149,12 +1162,17 @@ void ROS1Visualizer::T_align_callback(const geometry_msgs::TransformStamped &msg
std::cout << "3" << std::endl; std::cout << "3" << std::endl;
for (int i=0;i<cubeVisPoints.size();i++){ for (int i=0;i<cubeVisPoints.size();i++){
points(i, 0) = cubeVisPoints[0][0]; points(i, 0) = cashedCubeVisPoints[0][0];
points(i, 1) = cubeVisPoints[0][1]; points(i, 1) = cashedCubeVisPoints[0][1];
points(i, 2) = cubeVisPoints[0][2]; points(i, 2) = cashedCubeVisPoints[0][2];
points(i, 3) = 1; points(i, 3) = 1;
} }
std::cout << "4" << std::endl; std::cout << "4" << std::endl;
std::cout << "Extracted T_align: " << T_align << std::endl;
std::stringstream T_align_output;
T_align_output << T_align << std::endl;
// Fixed: use an explicit "%s" format instead of a runtime format string
// (printf-style ROS_INFO would interpret any '%' in the matrix text).
ROS_INFO("%s", T_align_output.str().c_str());
// The stream takes std::string directly; no c_str() round-trip needed.
fT_aligned << last_visualization_timestamp << std::endl << T_align_output.str() << std::endl;
points = (T_align * points.transpose()).transpose(); points = (T_align * points.transpose()).transpose();
for (int i=0;i<cubeVisPoints.size();i++){ for (int i=0;i<cubeVisPoints.size();i++){
@@ -1164,5 +1182,26 @@ void ROS1Visualizer::T_align_callback(const geometry_msgs::TransformStamped &msg
} }
std::cout << "5" << std::endl; std::cout << "5" << std::endl;
// Align the trajectory and publish to see the visualization of obtained R and t.
Eigen::MatrixXd poses(curr_path.poses.size(), 4);
// extract (homogeneous coordinates: the 4th component must be 1 so the
// translation part of T_align is applied - it was left uninitialized before,
// feeding garbage into the multiply)
for (int i = 0; i < (int)curr_path.poses.size(); i++){
  poses(i, 0) = curr_path.poses[i].pose.position.x;
  poses(i, 1) = curr_path.poses[i].pose.position.y;
  poses(i, 2) = curr_path.poses[i].pose.position.z;
  poses(i, 3) = 1;
}
// align
Eigen::MatrixXd aligned_poses = (T_align * poses.transpose()).transpose();
// fill again
// Fixed: the original wrote the *unaligned* `poses` back, so the published
// path was identical to the input. Use the transformed coordinates.
for (int i = 0; i < (int)curr_path.poses.size(); i++){
  curr_path.poses[i].pose.position.x = aligned_poses(i, 0);
  curr_path.poses[i].pose.position.y = aligned_poses(i, 1);
  curr_path.poses[i].pose.position.z = aligned_poses(i, 2);
}
// publish
pub_aligned_traj.publish(curr_path);
} }

View File

@@ -25,6 +25,7 @@
#include <fstream> #include <fstream>
#include <sstream> #include <sstream>
#include <string> #include <string>
#include <filesystem>
#include <geometry_msgs/PoseStamped.h> #include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h> #include <geometry_msgs/PoseWithCovarianceStamped.h>
@@ -162,18 +163,24 @@ protected:
// Added by PI on 27.07.2022 // Added by PI on 27.07.2022
// image_transport::CameraPublisher pub_camera_info, pub_camera_info_trackhist; // image_transport::CameraPublisher pub_camera_info, pub_camera_info_trackhist;
image_transport::CameraPublisher camPub; image_transport::CameraPublisher camPub;
image_transport::Publisher sensor_left_pub;
visualization_msgs::Marker Marker; visualization_msgs::Marker Marker;
std::vector<std::vector<double>> cubeVisPoints; std::vector<std::vector<double>> cubeVisPoints, cashedCubeVisPoints;
image_transport::CameraSubscriber camSub; image_transport::CameraSubscriber camSub;
sensor_msgs::CameraInfoPtr camInfoPtrGlob; sensor_msgs::CameraInfoPtr camInfoPtrGlob;
ros::Publisher pub_camera_info; ros::Publisher pub_camera_info, pub_cam0_info;
ros::Publisher vis_pub; ros::Publisher vis_pub;
sensor_msgs::CameraInfo camInfoGlob; sensor_msgs::CameraInfo camInfoGlob;
sensor_msgs::ImagePtr exl_msg_global; sensor_msgs::ImagePtr exl_msg_global;
ros::Subscriber sub_camera_info, subs_T_align; ros::Subscriber sub_camera_info, subs_T_align;
ros::Publisher pub_poserec; ros::Publisher pub_poserec, pub_aligned_traj;
nav_msgs::Path curr_path;
std::ofstream fT_aligned;
std::vector<Eigen::Matrix<double, 7, 1>> loadedTrajectory; std::vector<Eigen::Matrix<double, 7, 1>> loadedTrajectory;
int sensor_left_id;
// End of adding by PI // End of adding by PI
// Our subscribers and camera synchronizers // Our subscribers and camera synchronizers