// Source provenance (was raw repository-page residue that would not compile):
//   openvins_linux/ov_msckf/src/imrender_combine.cpp
//   2022-08-28 22:25:47 +03:00 — 140 lines, 6.2 KiB, C++

//
// Created by Podmogilnyi Ivan on 11.08.2022.
//
#include <iomanip>
#include <string>
#include <vector>

#include <ros/ros.h>
#include <sensor_msgs/Image.h>
#include <message_filters/subscriber.h>
#include <message_filters/sync_policies/approximate_time.h>
#include <message_filters/time_synchronizer.h>
#include <cv_bridge/cv_bridge.h>

#include <opencv2/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
// Buffer of raw camera frames filled by sensor_image_callback. NOTE(review):
// nothing in the active (synchronizer-based) pipeline reads this buffer — the
// manual timestamp search that consumed it is commented out below.
std::vector<sensor_msgs::Image> sensor_images;
// pub_res: combined overlay image; pub_mask: binary render mask;
// pub_recv_curr_image: pass-through of the raw frame (currently unused).
ros::Publisher pub_res, pub_mask, pub_recv_curr_image;
// sMsg - sensor message. rMsg - render message
// Overlay the rendered image onto the matching live sensor frame and publish
// the combined image plus the binary mask of rendered pixels.
//
// sMsg - raw sensor (camera) message; rMsg - rendered message. The pair is
// delivered pre-matched by the ApproximateTime synchronizer set up in main(),
// so the old manual timestamp search (previously guarded by found_flag, which
// was always true) is no longer needed and has been removed.
void rendered_image_callback(sensor_msgs::Image::ConstPtr sMsg, sensor_msgs::Image::ConstPtr rMsg){
    // Decode both messages into BGR OpenCV matrices.
    cv_bridge::CvImagePtr bridge_img = cv_bridge::toCvCopy(rMsg, "bgr8");
    cv::Mat image = bridge_img->image;
    cv_bridge::CvImagePtr bridge_sensor_img = cv_bridge::toCvCopy(sMsg, "bgr8");
    cv::Mat current_sensor_image = bridge_sensor_img->image;
    ROS_INFO_STREAM("The image encoding is: " << bridge_img->encoding);
    ROS_INFO_STREAM("The message sequence number: " << rMsg->header.seq);
    ROS_INFO_STREAM("The sensor_message sequence number: " << sMsg->header.seq);
    ROS_INFO_STREAM("The message stamp is: " << std::fixed << std::setprecision(9) << rMsg->header.stamp.toSec());
    ROS_INFO_STREAM("The sensor_message stamp is: " << std::fixed << std::setprecision(9) << sMsg->header.stamp.toSec());

    // A rendered pixel is "valid" wherever it is non-black: build the binary
    // mask from the grayscale version of the rendered image.
    cv::Mat mask;
    cv::cvtColor(image, mask, cv::COLOR_BGR2GRAY);

    // Start from the live sensor frame and overwrite only the pixels covered
    // by the render. copyTo with a mask copies exactly where mask != 0, which
    // replaces the previous hand-rolled per-pixel double loop (vectorized,
    // identical result). The old zero-filled Mat that was immediately
    // overwritten by clone() is gone too.
    cv::Mat masked = current_sensor_image.clone();
    image.copyTo(masked, mask);

    // NOTE(review): this writes one PNG per frame into the node's working
    // directory; kept to preserve the original (debugging) behavior — confirm
    // whether it should be behind a parameter.
    std::string fn = std::to_string(rMsg->header.stamp.toSec()) + "_cam0.png";
    cv::imwrite(fn, masked);

    // Publish both outputs stamped with the render time in the cam0 frame.
    std_msgs::Header header;
    header.stamp = rMsg->header.stamp;
    header.frame_id = "cam0";
    sensor_msgs::ImagePtr masked_msg = cv_bridge::CvImage(header, "bgr8", masked).toImageMsg();
    sensor_msgs::ImagePtr mask_msg = cv_bridge::CvImage(header, "mono8", mask).toImageMsg();
    pub_res.publish(*masked_msg);
    pub_mask.publish(*mask_msg);
}
// Buffer an incoming raw sensor frame. NOTE(review): nothing in the active
// (synchronizer-based) pipeline consumes sensor_images anymore — the manual
// timestamp search it fed is commented out in rendered_image_callback — so
// the buffer previously grew without bound for the node's lifetime. It is
// now capped: the oldest frame is dropped once the limit is reached, which
// is behavior-compatible because only recent frames could ever be matched.
void sensor_image_callback(sensor_msgs::Image msg){
    // Upper bound on retained frames; matches the queue depth the original
    // (commented-out) plain subscriber used.
    static const std::size_t kMaxBufferedImages = 1000;
    sensor_images.push_back(msg);
    if (sensor_images.size() > kMaxBufferedImages){
        sensor_images.erase(sensor_images.begin());
    }
    ROS_INFO_STREAM("The sensor_images size is: " << sensor_images.size());
    ROS_INFO_STREAM("The sensor_image seq number is: " << msg.header.seq);
}
// Pair each raw camera frame with its rendered counterpart; approximate-time
// matching tolerates small stamp drift between the two streams.
typedef message_filters::sync_policies::ApproximateTime<sensor_msgs::Image, sensor_msgs::Image> sync_pol;

// Node entry point: wire the synchronized input subscribers to the combining
// callback, advertise the output topics, then hand control to the ROS loop.
int main(int argc, char* argv[]){
    ros::init(argc, argv, "image_render_combine");
    ros::NodeHandle nh;
    sensor_images = {};
    ROS_INFO("Hello world!");
    // Left over from the earlier plain-subscriber approach; currently unused.
    ros::Subscriber sub_rend_image, sub_sensor_image;
    // Synchronized inputs: the raw left camera stream and the rendered stream
    // produced by ov_msckf. Held in shared_ptrs so they outlive registration.
    auto cam_sub = std::make_shared<message_filters::Subscriber<sensor_msgs::Image>>(nh, "/stereo/left/image_raw", 1);
    auto render_sub = std::make_shared<message_filters::Subscriber<sensor_msgs::Image>>(nh, "/ov_msckf/rendered", 1);
    auto synchronizer = std::make_shared<message_filters::Synchronizer<sync_pol>>(sync_pol(10), *cam_sub, *render_sub);
    synchronizer->registerCallback(boost::bind(&rendered_image_callback, _1, _2));
    // Outputs: the combined overlay image and the binary render mask.
    pub_res = nh.advertise<sensor_msgs::Image>("/ov_msckf/combined", 2);
    pub_mask = nh.advertise<sensor_msgs::Image>("/ov_msckf/mask", 2);
    ros::spin();
    return 0;
}