Implemented frame synchronization and frame skew to match the rendered image with the sensor image

;
This commit is contained in:
admin1
2022-08-16 12:44:20 +03:00
parent 1c81d558a8
commit f8843bf0fc
2 changed files with 75 additions and 36 deletions

View File

@@ -3,6 +3,9 @@
//
#include <ros/ros.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/time_synchronizer.h>
#include <cv_bridge/cv_bridge.h>
#include <opencv2/core.hpp>
@@ -13,9 +16,9 @@
std::vector<sensor_msgs::Image> sensor_images;
ros::Publisher pub_res, pub_mask, pub_recv_curr_image;
void rendered_image_callback(sensor_msgs::Image msg){
void rendered_image_callback(sensor_msgs::Image::ConstPtr sMsg, sensor_msgs::Image::ConstPtr rMsg){
cv::Mat image, mask;
cv_bridge::CvImagePtr bridge_img = cv_bridge::toCvCopy(msg, "bgr8");
cv_bridge::CvImagePtr bridge_img = cv_bridge::toCvCopy(rMsg, "bgr8");
image = bridge_img->image;
ROS_INFO_STREAM("The image encoding is: " << bridge_img->encoding);
mask = image.clone();
@@ -23,17 +26,26 @@ void rendered_image_callback(sensor_msgs::Image msg){
// find the appropriate image from msgs
cv::Mat current_sensor_image;
ROS_INFO_STREAM("The message sequence number: " << msg.header.seq);
ROS_INFO_STREAM("The sensor_message sequence number: " << sensor_images[sensor_images.size()-1].header.seq);
bool found_flag = false;
for (int i=sensor_images.size()-1;i>0;i--){
if (i == msg.header.seq){
cv_bridge::CvImagePtr bridge_sensor_img = cv_bridge::toCvCopy(sensor_images[i], "bgr8");
cv_bridge::CvImagePtr bridge_sensor_img = cv_bridge::toCvCopy(sMsg, "bgr8");
current_sensor_image = bridge_sensor_img->image;
found_flag = true;
break;
}
}
ROS_INFO_STREAM("The message sequence number: " << rMsg->header.seq);
ROS_INFO_STREAM("The sensor_message sequence number: " << sMsg->header.seq);
ROS_INFO_STREAM("The message stamp is: " << std::fixed << std::setprecision(9) << rMsg->header.stamp.toSec());
ROS_INFO_STREAM("The sensor_message stamp is: " << std::fixed << std::setprecision(9) << sMsg->header.stamp.toSec());
bool found_flag = true;
// Since the timestamps match exactly, we don't need to search for the closest
// timestamp ourselves, so the manual lookup (the Synchronizer from message_filters handles pairing) is no longer needed.
// for (int i=sensor_images.size()-1;i>=0;i--){
// if (sensor_images[i].header.stamp == msg.header.stamp){
// cv_bridge::CvImagePtr bridge_sensor_img = cv_bridge::toCvCopy(sensor_images[i], "bgr8");
// current_sensor_image = bridge_sensor_img->image;
// found_flag = true;
// break;
// }
// }
if (found_flag){
ROS_INFO_STREAM("imrender: 1");
@@ -57,11 +69,11 @@ void rendered_image_callback(sensor_msgs::Image msg){
// cv::bitwise_and(current_sensor_image, image, masked, mask);
ROS_INFO_STREAM("imrender: 3");
std::string fn = std::to_string(msg.header.stamp.toSec()) + "_cam0.png";
std::string fn = std::to_string(rMsg->header.stamp.toSec()) + "_cam0.png";
cv::imwrite(fn, masked);
std_msgs::Header header;
header.stamp = msg.header.stamp;
header.stamp = rMsg->header.stamp;
header.frame_id = "cam0";
sensor_msgs::ImagePtr masked_msg = cv_bridge::CvImage(header, "bgr8", masked).toImageMsg();
sensor_msgs::ImagePtr mask_msg = cv_bridge::CvImage(header, "mono8", mask).toImageMsg();
@@ -96,6 +108,8 @@ void sensor_image_callback(sensor_msgs::Image msg){
// ROS_INFO_STREAM("imrender: 8");
}
typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> sync_pol;
int main(int argc, char* argv[]){
ros::init(argc, argv, "image_render_combine");
@@ -106,8 +120,12 @@ int main(int argc, char* argv[]){
ros::Subscriber sub_rend_image, sub_sensor_image;
sub_sensor_image = nh.subscribe("/stereo/left/image_raw", 1000, &sensor_image_callback);
sub_rend_image = nh.subscribe("/ov_msckf/rendered", 1000, &rendered_image_callback);
// sub_sensor_image = nh.subscribe("/stereo/left/image_raw", 1000, &sensor_image_callback);
// sub_rend_image = nh.subscribe("/ov_msckf/rendered", 1000, &rendered_image_callback);
auto sensor_image_sub = std::make_shared<message_filters::Subscriber<sensor_msgs::Image>>(nh, "/stereo/left/image_raw", 1);
auto rend_image_sub = std::make_shared<message_filters::Subscriber<sensor_msgs::Image>>(nh, "/ov_msckf/rendered", 1);
auto sync = std::make_shared<message_filters::Synchronizer<sync_pol>>(sync_pol(10), *sensor_image_sub, *rend_image_sub);
sync->registerCallback(boost::bind(&rendered_image_callback, _1, _2));
pub_res = nh.advertise<sensor_msgs::Image>("/ov_msckf/combined", 2);
pub_mask = nh.advertise<sensor_msgs::Image>("/ov_msckf/mask", 2);
pub_recv_curr_image = nh.advertise<sensor_msgs::Image>("/ov_msckf/recv_curr_image", 2);

View File

@@ -171,30 +171,30 @@ ROS1Visualizer::ROS1Visualizer(std::shared_ptr<ros::NodeHandle> nh, std::shared_
float yCubesVisPos = -6.0;
float skew = 5.0;
cubeVisPoints = {
{71.215, 32.694, yCubesVisPos},
{71.215, 32.694, yCubesVisPos+3},
// {81.668, 26.227, yCubesVisPos},
{84.455, 41.731, yCubesVisPos},
{84.455, 41.731, yCubesVisPos+3},
// {90.726, 32.825, yCubesVisPos},
{109.541, 60.415, yCubesVisPos},
{109.541, 60.415, yCubesVisPos+3},
// {149.264, 69.490, yCubesVisPos},
{150.645, 89.474, yCubesVisPos},
{150.645, 89.474, yCubesVisPos+2},
// {198.294, 97.113, yCubesVisPos},
{251.391, 154.108, yCubesVisPos},
// {310.030, 190.208, yCubesVisPos},
{330.554, 183.244, yCubesVisPos},
{746.086, 450.563, yCubesVisPos-1},
{746.086, 450.563, yCubesVisPos},
// {792.608, 527.140, yCubesVisPos},
{883.669, 517.444, yCubesVisPos-1},
{957.811, 652.638, yCubesVisPos-1},
{1086.045, 669.317, yCubesVisPos-2},
{1125.310, 728.214, yCubesVisPos-2},
{731.996, 425.021, yCubesVisPos-1},
{883.669, 517.444, yCubesVisPos},
{957.811, 652.638, yCubesVisPos},
{1086.045, 669.317, yCubesVisPos},
{1125.310, 728.214, yCubesVisPos},
{731.996, 425.021, yCubesVisPos},
{1186.527, 700.189, yCubesVisPos-2},
{1186.527, 700.189, yCubesVisPos},
// {1205.432, 681.457, yCubesVisPos},
{1184.375, 735.604, yCubesVisPos-2},
{1184.375, 735.604, yCubesVisPos},
// {1232.697, 715.647, yCubesVisPos},
{1244.767, 784.257, yCubesVisPos-2}
{1244.767, 784.257, yCubesVisPos}
};
cashedCubeVisPoints = cubeVisPoints;
// End of added by PI
@@ -715,16 +715,35 @@ void ROS1Visualizer::publish_images() {
sensor_msgs::ImagePtr exl_msg = cv_bridge::CvImage(header, "bgr8", img_history).toImageMsg();
exl_msg_global = exl_msg;
// Get the original image in order to publish it to the topic for rendering with CameraPub.
cv::Mat original_img;
double original_img_ts;
_app->get_active_image(original_img_ts, original_img);
// The function usually returns -1, so fall back to the timestamp from camera_last_timestamp.
if (original_img_ts == -1){
original_img_ts = camera_last_timestamp[0];
}
std_msgs::Header orig_img_msg_header;
// As I remember, the timestamps in the KAIST dataset are already in seconds.
ROS_INFO_STREAM("org msg: 1");
ros::Time orig_stamp(original_img_ts);
ROS_INFO_STREAM("org msg: 2");
orig_img_msg_header.stamp = orig_stamp;
orig_img_msg_header.frame_id = "cam0";
sensor_msgs::ImagePtr original_img_message = cv_bridge::CvImage(orig_img_msg_header, "bgr8", original_img).toImageMsg();
ROS_INFO_STREAM("The camera last timestamp for cam0 is: " << std::fixed << std::setprecision(21) << original_img_ts);
ROS_INFO_STREAM("Original image width and height are: " << original_img.cols << " " << original_img.rows);
// Publish
it_pub_tracks.publish(exl_msg);
// publish the left image with the visualizations
cv::Range rows(0, img_history.rows);
cv::Range cols(0, img_history.cols / 2);
cv::Mat cropped_image = img_history(rows, cols);
sensor_msgs::ImagePtr sensor_left_msg = cv_bridge::CvImage(header, "bgr8", cropped_image).toImageMsg();
sensor_left_msg->header.seq = sensor_left_id++;
sensor_left_pub.publish(sensor_left_msg);
// cv::Range rows(0, img_history.rows);
// cv::Range cols(0, img_history.cols / 2);
// cv::Mat cropped_image = img_history(rows, cols);
// sensor_msgs::ImagePtr sensor_left_msg = cv_bridge::CvImage(header, "bgr8", cropped_image).toImageMsg();
// sensor_left_msg->header.seq = sensor_left_id++;
sensor_left_pub.publish(original_img_message);
ROS_INFO("Image publish success!");
bool is_fisheye = (std::dynamic_pointer_cast<ov_core::CamEqui>(_app->get_params().camera_intrinsics.at(0)) != nullptr);
@@ -747,6 +766,8 @@ void ROS1Visualizer::publish_images() {
camInfoGlob = cameraparams;
// Added by PI
pub_camera_info.publish(camInfoGlob);
camInfoGlob.header.frame_id = "cam0";
camInfoGlob.header.stamp = orig_img_msg_header.stamp;
camInfoGlob.height = exl_msg->height;
camInfoGlob.width = exl_msg->width / 2;
pub_cam0_info.publish(camInfoGlob);