// File: cv_networking_pipeline_linux/drawing.cpp
// Commit: Ivan 5909db4eec v1 — 2022-05-27 08:26:38 +03:00
// (Repository web-viewer listing header removed; it was not valid C++.)
// Created by ivan on 16.05.2022.
// Binary that receives camera coordinates and a video frame and renders the
// image with cubes drawn on top. Used to verify the output of the first binary.
#include <unistd.h>

#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <deque>
#include <iostream>
#include <mutex>
#include <sstream>
#include <stdexcept>
#include <string>
#include <thread>
#include <unordered_map>

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/video/gstvideometa.h>
#include <opencv2/opencv.hpp>
#include <pangolin/pangolin.h>
#include "ip/UdpSocket.h"
#include "osc/OscOutboundPacketStream.h"
#include "osc/OscPacketListener.h"
#include "osc/OscReceivedElements.h"
#include "sophus/se3.hpp"
#define MY_GST_USE_OPENCV
// CHANGE HERE TO MAKE PORT
#define ADDRESS "127.0.0.1"
#define PORT 7000
#define OUTPUT_BUFFER_SIZE 1024
#if defined(__BORLANDC__) // workaround for BCB4 release build intrinsics bug
namespace std {
using ::__strcmp__; // avoid error: E2316 '__strcmp__' is not a member of 'std'.
}
#endif
// Guards the frame/timestamp queues shared between the GStreamer appsink
// callback thread (producer) and the render thread (consumer).
std::mutex g_mutex;
// Frames pulled from the appsink, waiting to be rendered.
std::deque<cv::Mat> frameQueue;
// Capture timestamps parsed from caption metas, parallel to frameQueue.
std::deque<double> timestampsQueue;
//std::deque<std::string> TwcQueue;
// Camera-to-world poses received over OSC, keyed by frame timestamp.
// NOTE(review): written by the OSC thread and read by the render thread
// without holding g_mutex — confirm this is safe or add locking.
std::unordered_map<double, Sophus::SE3f> TwcMap;
// Pangolin render state for the AR overlay camera.
pangolin::OpenGlRenderState ar_3d_camera;
// Projection matrix loaded before drawing the AR scene.
pangolin::OpenGlMatrixSpec P;
// Latest camera pose followed by ar_3d_camera in draw_scene_no_camera().
pangolin::OpenGlMatrix Twc;
// UI toggles on the "menu" panel: per-cube visibility switches.
pangolin::Var<bool> showCube1("menu.Cube1", true, false);
pangolin::Var<bool> showCube2("menu.Cube2", true, false);
pangolin::Var<bool> showCube3("menu.Cube3", true, false);
pangolin::Var<bool> showCube4("menu.Cube4", true, false);
pangolin::Var<bool> showCube5("menu.Cube5", true, false);
pangolin::Var<bool> showCube6("menu.Cube6", true, false);
pangolin::Var<bool> showCube7("menu.Cube7", true, false);
// Whether to blit the camera image behind the overlay.
pangolin::Var<bool> drawTexture("menu.drawTexture", true, false);
pangolin::Var<bool> pauseSeq("menu.pauseSeq", false, true);
// One-shot button: save the current render to disk.
pangolin::Var<bool> saveRender("menu.saveRender", false, false);
pangolin::Var<float> saveRenderScale("menu.saveRenderScale", 1.0, 0.1, 2.0);
// Edge length of the wireframe cubes (world units).
pangolin::Var<int> cubeSize("menu.cubeSize", 2, 1, 5);
// Draw one extra, manually positioned cube (position below).
pangolin::Var<bool> drawLineCubeBool("menu.drawLineCubeBool", false, true);
// Manual placement of the extra cube, adjustable from the UI.
pangolin::Var<double> xSkew("menu.x", 0, -40, 40);
pangolin::Var<double> ySkew("menu.y", 0, -5, 5);
pangolin::Var<double> zSkew("menu.z", 0, -360, 360);
void drawLinesCube(float x, float y, float z, int scale){
// float size = 1.0;
// float size = 1.0;
pangolin::OpenGlMatrix M = pangolin::OpenGlMatrix::Translate(-x,-y,-z);
x = 0; y = 0; z = 0;
glPushMatrix();
M.Multiply();
glColor3f(0, 1, 0);
glLineWidth(5);
// glColor3ui(133, 247, 208);
glBegin(GL_LINES);
// Bottom
// glColor3ui(133, 247, 208);
glVertex3f(x, y, z);
glVertex3f(x, y+scale, z);
// glColor3ui(253, 59, 86);
glVertex3f(x, y, z);
glVertex3f(x+scale, y, z);
// glColor3ui(147, 175, 215);
glVertex3f(x, y+scale, z);
glVertex3f(x+scale, y+scale, z);
// glColor3ui(80, 209, 168);
glVertex3f(x+scale, y, z);
glVertex3f(x+scale, y+scale, z);
// Top
// glColor3ui(154, 13, 88);
glVertex3f(x, y, z+scale);
glVertex3f(x, y+scale, z+scale);
// glColor3ui(253, 59, 86);
glVertex3f(x, y, z+scale);
glVertex3f(x+scale, y, z+scale);
// glColor3ui(5, 26, 72);
glVertex3f(x, y+scale, z+scale);
glVertex3f(x+scale, y+scale, z+scale);
// glColor3ui(72, 182, 8);
glVertex3f(x+scale, y, z+scale);
glVertex3f(x+scale, y+scale, z+scale);
// Sides
// glColor3ui(28, 122, 71);
glVertex3f(x, y, z);
glVertex3f(x, y, z+scale);
// glColor3ui(244, 207, 185);
glVertex3f(x, y+scale, z);
glVertex3f(x, y+scale, z+scale);
// glColor3ui(88, 153, 225);
glVertex3f(x+scale, y, z);
glVertex3f(x+scale, y, z+scale);
// glColor3ui(184, 151, 253);
glVertex3f(x+scale, y+scale, z);
glVertex3f(x+scale, y+scale, z+scale);
glEnd();
glPopMatrix();
}
// Pangolin extern_draw_function for the "image" view: draws the AR overlay
// (wireframe cubes + world axis) on top of the camera image, using the
// globally updated pose Twc and projection matrix P.
void draw_scene_no_camera(pangolin::View& view){
// UNUSED(view);
view.Activate(ar_3d_camera);
// myMapDrawer->DrawCurrentCamera(Twc);
// if(menuShowKeyFrames || menuShowGraph || menuShowInertialGraph || menuShowOptLba)
// myMapDrawer->DrawKeyFrames(menuShowKeyFrames,menuShowGraph, menuShowInertialGraph, menuShowOptLba);
// if(menuShowPoints)
// myMapDrawer->DrawMapPoints();
// Clear only depth so the camera image already in the color buffer survives.
glClear(GL_DEPTH_BUFFER_BIT);
// Install the AR projection before drawing the overlay geometry.
glMatrixMode(GL_PROJECTION);
P.Load();
glMatrixMode(GL_MODELVIEW);
// Keep the virtual camera glued to the most recently received pose.
ar_3d_camera.Follow(Twc);
// Optional extra cube, positioned interactively from the menu sliders.
if (drawLineCubeBool){
drawLinesCube(xSkew, ySkew, zSkew, cubeSize);
}
// For the 1st sequence mono-kitti
// if (showCube1) {drawLinesCube(0.0, 1.0, -95.0, cubeSize);}
// if (showCube2) {drawLinesCube(0.0, 1.5, -35.0, cubeSize);}
// if (showCube3) {drawLinesCube(5.0, 1.5, -40.0, cubeSize);}
// if (showCube4) {drawLinesCube(-32.0, 1.5, -85.0, cubeSize);}
// if (showCube5) {drawLinesCube(-32.0, 1.5, -95.0, cubeSize);}
// if (showCube6) {drawLinesCube(-64.0, 3.0, -95.0, cubeSize);}
// if (showCube7) {drawLinesCube(-64.0, 3.0, -85.0, cubeSize);}
// For the 6th sequence mono-kitti
// Hard-coded cube anchor points (world coordinates) for this sequence.
if (showCube1) {drawLinesCube(5.15, 0.82, -33.81, cubeSize);}
if (showCube2) {drawLinesCube(0.7, 0.56, -11.5, cubeSize);}
if (showCube3) {drawLinesCube(5.15, 0.8, -95.0, cubeSize);}
if (showCube4) {drawLinesCube(4.6, 2.1, -130.3, cubeSize);}
// This ones' height can be wrong
if (showCube5) {drawLinesCube(4.1, 5.9, -346.5, cubeSize);}
if (showCube6) {drawLinesCube(25.7, 4.8, -346.5, cubeSize);}
// if (showCube7) {drawLinesCube(32.5, 3.8, -345.7, cubeSize);}
// Draw the world-origin axis triad for orientation reference.
pangolin::glDrawAxis(Sophus::SE3d().matrix(), 1.0);
}
void TwcMsgProcessor(const std::string& msg){
double ts;
Eigen::Matrix<float, 3, 3> Rwc;
Eigen::Matrix<float, 1, 3> twc;
std::stringstream ss;
if (!msg.empty()){
ss << msg;
}
ss >> ts;
ss >> Rwc(0,0) >> Rwc(0,1) >> Rwc(0,2) >> twc(0) >>
Rwc(1,0) >> Rwc(1,1) >> Rwc(1,2) >> twc(1) >>
Rwc(2,0) >> Rwc(2,1) >> Rwc(2,2) >> twc(2);
Sophus::SE3f Twc;
Twc.setRotationMatrix(Rwc);
Twc.translation() = twc;
TwcMap[ts] = Twc;
}
namespace osc{
// OSC packet listener adapted from the oscpack ReceiveTest example.
// It demonstrates the stream- and iterator-based argument parsing APIs for
// many address patterns; the only pattern this application actually consumes
// is a "/test3" message carrying a string, whose payload is forwarded to
// TwcMsgProcessor() to update the camera-pose map. All other branches just
// log what was received.
class OscReceiveTestPacketListener : public OscPacketListener{
protected:
// Invoked by the oscpack socket loop for every received OSC message.
void ProcessMessage( const osc::ReceivedMessage& m,
const IpEndpointName& remoteEndpoint )
{
(void) remoteEndpoint; // suppress unused parameter warning
// a more complex scheme involving std::map or some other method of
// processing address patterns could be used here
// (see MessageMappingOscPacketListener.h for example). however, the main
// purpose of this example is to illustrate and test different argument
// parsing methods
try {
// argument stream, and argument iterator, used in different
// examples below.
ReceivedMessageArgumentStream args = m.ArgumentStream();
ReceivedMessage::const_iterator arg = m.ArgumentsBegin();
if( std::strcmp( m.AddressPattern(), "/test1" ) == 0 ){
// example #1:
// parse an expected format using the argument stream interface:
bool a1;
osc::int32 a2;
float a3;
const char *a4;
args >> a1 >> a2 >> a3 >> a4 >> osc::EndMessage;
std::cout << "received '/test1' message with arguments: "
<< a1 << " " << a2 << " " << a3 << " " << a4 << "\n";
}else if( std::strcmp( m.AddressPattern(), "/test2" ) == 0 ){
// example #2:
// parse an expected format using the argument iterator interface
// this is a more complicated example of doing the same thing
// as above.
bool a1 = (arg++)->AsBool();
int a2 = (arg++)->AsInt32();
float a3 = (arg++)->AsFloat();
const char *a4 = (arg++)->AsString();
if( arg != m.ArgumentsEnd() )
throw ExcessArgumentException();
std::cout << "received '/test2' message with arguments: "
<< a1 << " " << a2 << " " << a3 << " " << a4 << "\n";
}else if( std::strcmp( m.AddressPattern(), "/test3" ) == 0 ){
// example #3:
// parse a variable argument format using the argument iterator
// interface. this is where it is necessary to use
// argument iterators instead of streams.
// When messages may contain arguments of varying type, you can
// use the argument iterator interface to query the types at
// runtime. this is more flexible that the argument stream
// interface, which requires each argument to have a fixed type
if( arg->IsBool() ){
bool a = (arg++)->AsBoolUnchecked();
std::cout << "received '/test3' message with bool argument: "
<< a << "\n";
}else if( arg->IsInt32() ){
int a = (arg++)->AsInt32Unchecked();
std::cout << "received '/test3' message with int32 argument: "
<< a << "\n";
}else if( arg->IsFloat() ){
float a = (arg++)->AsFloatUnchecked();
std::cout << "received '/test3' message with float argument: "
<< a << "\n";
}else if( arg->IsString() ){
// This is the branch the application actually uses: a string
// "/test3" payload carries a serialized camera pose, which is
// handed to TwcMsgProcessor() to populate TwcMap.
const char *a = (arg++)->AsStringUnchecked();
std::cout << "received '/test3' message with string argument: '"
<< a << "'\n";
std::string msg(a);
TwcMsgProcessor(msg);
}else{
std::cout << "received '/test3' message with unexpected argument type\n";
}
if( arg != m.ArgumentsEnd() )
throw ExcessArgumentException();
}else if( std::strcmp( m.AddressPattern(), "/no_arguments" ) == 0 ){
args >> osc::EndMessage;
std::cout << "received '/no_arguments' message\n";
}else if( std::strcmp( m.AddressPattern(), "/a_bool" ) == 0 ){
bool a;
args >> a >> osc::EndMessage;
std::cout << "received '/a_bool' message: " << a << "\n";
}else if( std::strcmp( m.AddressPattern(), "/nil" ) == 0 ){
std::cout << "received '/nil' message\n";
}else if( std::strcmp( m.AddressPattern(), "/inf" ) == 0 ){
std::cout << "received '/inf' message\n";
}else if( std::strcmp( m.AddressPattern(), "/an_int" ) == 0 ){
osc::int32 a;
args >> a >> osc::EndMessage;
std::cout << "received '/an_int' message: " << a << "\n";
}else if( std::strcmp( m.AddressPattern(), "/a_float" ) == 0 ){
float a;
args >> a >> osc::EndMessage;
std::cout << "received '/a_float' message: " << a << "\n";
}else if( std::strcmp( m.AddressPattern(), "/a_char" ) == 0 ){
char a;
args >> a >> osc::EndMessage;
char s[2] = {0};
s[0] = a;
std::cout << "received '/a_char' message: '" << s << "'\n";
}else if( std::strcmp( m.AddressPattern(), "/an_rgba_color" ) == 0 ){
osc::RgbaColor a;
args >> a >> osc::EndMessage;
std::cout << "received '/an_rgba_color' message: " << a.value << "\n";
}else if( std::strcmp( m.AddressPattern(), "/a_midi_message" ) == 0 ){
osc::MidiMessage a;
args >> a >> osc::EndMessage;
std::cout << "received '/a_midi_message' message: " << a.value << "\n";
}else if( std::strcmp( m.AddressPattern(), "/an_int64" ) == 0 ){
osc::int64 a;
args >> a >> osc::EndMessage;
std::cout << "received '/an_int64' message: " << a << "\n";
}else if( std::strcmp( m.AddressPattern(), "/a_time_tag" ) == 0 ){
osc::TimeTag a;
args >> a >> osc::EndMessage;
std::cout << "received '/a_time_tag' message: " << a.value << "\n";
}else if( std::strcmp( m.AddressPattern(), "/a_double" ) == 0 ){
double a;
args >> a >> osc::EndMessage;
std::cout << "received '/a_double' message: " << a << "\n";
}else if( std::strcmp( m.AddressPattern(), "/a_string" ) == 0 ){
const char *a;
args >> a >> osc::EndMessage;
std::cout << "received '/a_string' message: '" << a << "'\n";
}else if( std::strcmp( m.AddressPattern(), "/a_symbol" ) == 0 ){
osc::Symbol a;
args >> a >> osc::EndMessage;
std::cout << "received '/a_symbol' message: '" << a.value << "'\n";
}else if( std::strcmp( m.AddressPattern(), "/a_blob" ) == 0 ){
osc::Blob a;
args >> a >> osc::EndMessage;
std::cout << "received '/a_blob' message\n";
}else{
std::cout << "unrecognised address pattern: "
<< m.AddressPattern() << "\n";
}
// Parsing errors (wrong types, excess arguments) are logged, never
// propagated — a bad packet must not kill the listener thread.
}catch( Exception& e ){
std::cout << "error while parsing message: "
<< m.AddressPattern() << ": " << e.what() << "\n";
}
}
};
void RunReceiveTest( int port )
{
osc::OscReceiveTestPacketListener listener;
UdpListeningReceiveSocket s(
IpEndpointName( IpEndpointName::ANY_ADDRESS, port ),
&listener );
std::cout << "listening for input on port " << port << "...\n";
std::cout << "press ctrl-c to end\n";
s.RunUntilSigInt();
std::cout << "finishing.\n";
}
} // namespace osc
// App-sink preroll callback: logs that the first (preroll) sample arrived.
// The sample itself is not pulled here; real processing happens in new_sample().
GstFlowReturn new_preroll(GstAppSink *appsink, gpointer data) {
    (void) appsink;  // unused
    (void) data;     // unused
    g_print ("Got preroll!\n");
    return GST_FLOW_OK;
}
GstFlowReturn new_sample(GstAppSink *appsink, gpointer data) {
static int framecount = 0;
framecount++;
std::cout << "nnew frame " << framecount << std::endl;
GstSample *sample = gst_app_sink_pull_sample(appsink);
GstCaps *caps = gst_sample_get_caps(sample);
GstBuffer *buffer = gst_sample_get_buffer(sample);
const auto& n_memory = gst_buffer_n_memory(buffer);
std::cout << "n_memory = " << n_memory << std::endl;
std::cout << "buffer->pts = " << buffer->pts << std::endl;
std::cout << "buffer->dts = " << buffer->dts << std::endl;
std::cout << "buffer->duration = " << buffer->duration << std::endl;
std::cout << "buffer->offset = " << buffer->offset << std::endl;
std::cout << "buffer->offset_end = " << buffer->offset_end << std::endl;
const GstStructure *info = gst_sample_get_info(sample);
GstMeta *gst_meta;
gpointer state = nullptr;
while ((gst_meta = gst_buffer_iterate_meta(buffer, &state))) {
if (gst_meta->info == gst_video_caption_meta_get_info()) {
auto specific_meta = (GstVideoCaptionMeta *) gst_meta;
if (specific_meta) {
auto x = (const char *) (specific_meta->data);
std::cout << "MetaInfo is recognized to be [GstVideoCaptionMeta]"
<< "caption = " << std::string(x, specific_meta->size)
<< std::endl;
std::string meta(x);
int idx = meta.find("timestamp: ");
int end_idx = meta.find(">");
std::string ts_str = meta.substr(idx+11, end_idx-1-(idx+11));
// std::cout << ts_str << std::endl;
std::lock_guard<std::mutex> guard(g_mutex);
timestampsQueue.push_back(std::stod(ts_str));
}
} else if (gst_meta->info == gst_video_time_code_meta_get_info()) {
auto specific_meta = (GstVideoTimeCodeMeta *) gst_meta;
if (specific_meta) {
std::cout << "MetaInfo is recognized to be [GstVideoTimeCodeMeta]"
<< " h = " << specific_meta->tc.hours
<< " m = " << specific_meta->tc.minutes
<< " s = " << specific_meta->tc.seconds
<< " f = " << specific_meta->tc.frames
<< std::endl;
}
} else if (gst_meta->info == gst_meta_get_info("GstNdiSrcMeta")) {
std::cout << "MetaInfo is recognized to be [GstNdiSrcMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstNdiSinkAudioMeta")) {
std::cout << "MetaInfo is recognized to be [GstNdiSinkAudioMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoCropMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoCropMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstFramePositionerMeta")) {
std::cout << "MetaInfo is recognized to be [GstFramePositionerMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstMetaDfbSurface")) {
std::cout << "MetaInfo is recognized to be [GstMetaDfbSurface]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstSubtitleMeta")) {
std::cout << "MetaInfo is recognized to be [GstSubtitleMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstRtmpMeta")) {
std::cout << "MetaInfo is recognized to be [GstRtmpMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstMpegVideoMeta")) {
std::cout << "MetaInfo is recognized to be [GstMpegVideoMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstSctpReceiveMeta")) {
std::cout << "MetaInfo is recognized to be [GstSctpReceiveMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstSctpSendMeta")) {
std::cout << "MetaInfo is recognized to be [GstSctpSendMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstCoreMediaMeta")) {
std::cout << "MetaInfo is recognized to be [GstCoreMediaMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstCoreVideoMeta")) {
std::cout << "MetaInfo is recognized to be [GstCoreVideoMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstAudioDownmixMeta")) {
std::cout << "MetaInfo is recognized to be [GstAudioDownmixMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstAudioClippingMeta")) {
std::cout << "MetaInfo is recognized to be [GstAudioClippingMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstGLSyncMeta")) {
std::cout << "MetaInfo is recognized to be [GstGLSyncMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstRTPSourceMeta")) {
std::cout << "MetaInfo is recognized to be [GstRTPSourceMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstRTPSourceMeta")) {
std::cout << "MetaInfo is recognized to be [GstRTPSourceMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoGLTextureUploadMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoGLTextureUploadMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoRegionOfInterestMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoRegionOfInterestMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoAFDMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoAFDMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoBarMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoBarMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoMultiviewMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoMultiviewMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoOverlayCompositionMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoOverlayCompositionMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstMetaXImage")) {
std::cout << "MetaInfo is recognized to be [GstMetaXImage]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstProtectionMeta")) {
std::cout << "MetaInfo is recognized to be [GstProtectionMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstNetControlMessageMeta")) {
std::cout << "MetaInfo is recognized to be [GstNetControlMessageMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstMetaTest")) {
std::cout << "MetaInfo is recognized to be [GstMetaTest]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstNVMMParentMeta")) {
std::cout << "MetaInfo is recognized to be [GstNVMMParentMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstAudioMeta")) {
std::cout << "MetaInfo is recognized to be [GstAudioMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstAudioLevelMeta")) {
std::cout << "MetaInfo is recognized to be [GstAudioLevelMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoAffineTransformationMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoAffineTransformationMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoCodecAlphaMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoCodecAlphaMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]";
} else {
std::cout << "GstMetaInfo is not recognized."
<< " info = " << gst_meta->info
<< " api = " << gst_meta->info->api
<< std::endl;
}
}
// ---- Read frame and convert to opencv format ---------------
GstMapInfo map;
gst_buffer_map (buffer, &map, GST_MAP_READ);
#ifdef MY_GST_USE_OPENCV
// convert gstreamer data to OpenCV Mat, you could actually
// resolve height / width from caps...
int width = 2560;
int height = 1440;
GstStructure *s = gst_caps_get_structure(caps, 0);
gboolean res = true;
res &= gst_structure_get_int (s, "width", &width);
res &= gst_structure_get_int (s, "height", &height);
cv::Mat frame(cv::Size(width, height), CV_8UC4, (char*)map.data, cv::Mat::AUTO_STEP);
std::printf("The image width and height: %d %d", frame.cols, frame.rows);
// int frameSize = map.size;
// std::cout << "size from caps = (" << width << "," << height << ")" << "res =" << res
// << " total size = " << map.size
// << std::endl;
// if (res) {
// std::fstream file("example.bin", std::ios::out | std::ios::binary | std::ios::app);
// file.write((char*)map.data, map.size);
// file.close();
// }
// throw 1;
{
std::lock_guard<std::mutex> guard(g_mutex);\
std::cout << "Got the frame. Saving it..." << std::endl;
frameQueue.push_back(frame.clone());
}
#endif
gst_buffer_unmap(buffer, &map);
// ------------------------------------------------------------
// print dot every 30 frames
if (framecount%30 == 0) {
g_print (".");
}
// show caps on first frame
if (framecount == 1) {
g_print ("%s\n", gst_caps_to_string(caps));
}
gst_sample_unref (sample);
return GST_FLOW_OK;
}
// GStreamer bus watch: logs every bus message and prints the details of
// error messages. EOS and all other message types are intentionally ignored.
static gboolean my_bus_callback (GstBus *bus, GstMessage *message, gpointer data) {
    (void) bus;
    (void) data;
    g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));
    if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
        GError *err = nullptr;
        gchar *debug = nullptr;
        gst_message_parse_error (message, &err, &debug);
        g_print ("Error: %s\n", err->message);
        g_error_free (err);
        g_free (debug);
    }
    // GST_MESSAGE_EOS and everything else: nothing to do here.
    /* we want to be notified again the next time there is a message
     * on the bus, so returning TRUE (FALSE means we want to stop watching
     * for messages on the bus and our callback should not be called again)
     */
    return TRUE;
}
// Command-line options parsed with GLib's GOption machinery in main().
static gint repeats = 2;           // -r N (not referenced elsewhere in this file)
static gchar* ndi_name = nullptr;  // -n: NDI source name fed to the ndisrc element
static gint use_gui = 1;           // -g: show the OpenCV "preview" window
static gboolean beep = FALSE;      // -b (not referenced elsewhere in this file)
// GOption table mapping long/short option names to the variables above.
static GOptionEntry entries[] =
{
{ "repeats", 'r', 0, G_OPTION_ARG_INT, &repeats, "Среднее число повторений N", "N" },
{ "ndi-name", 'n', 0, G_OPTION_ARG_STRING, &ndi_name, "you can enter the string here (ndi-name)", "M" },
{ "gui", 'g', 0, G_OPTION_ARG_INT, &use_gui, "use gui", nullptr },
{ "beep", 'b', 0, G_OPTION_ARG_NONE, &beep, "Сигнал при выполнениии", NULL },
{ NULL }
};
int main (int argc, char *argv[]) {
std::cout << "argc = " << argc << std::endl;
GError *error = nullptr;
GOptionContext *context;
char** argv_gst;
int argc_gst = 2;
argv_gst = new char* [2];
argv_gst[0] = new char[200];
argv_gst[1] = new char[200];
strcpy(argv_gst[0], argv[0]);
strcpy(argv_gst[1], argv[1]);
// std::cout << "The second parameter of the arguments: " << argv[2] << " " << argv[1] << " " << argv[0] << std::endl;
context = g_option_context_new("- test tree model performance");
g_option_context_add_main_entries(context, entries, "bla");
g_option_context_parse(context, &argc_gst, &argv_gst, &error);
// g_option_context_parse(context, &argc, &argv, &error);
if (!ndi_name) {
std::cout << "ndi-name is not provided" << std::endl;
// ndi_name = (char*)malloc(sizeof(char) * 100);
ndi_name = "DESKTOP-O5PNOBN (Test Pattern)";
std::cout << "ndi-name (default) = '" << ndi_name << "'" << std::endl;
} else {
std::cout << "ndi-name = '" << ndi_name << "'" << std::endl;
}
GstStateChangeReturn ret;
int fake_argc = 1;
gst_init (&fake_argc, &argv_gst);
std::stringstream ss;
ss << "ndisrc ndi-name=\"" << ndi_name << "\" ! ndisrcdemux name=demux "
<< "demux.video ! queue ! tee name=my_tee "
<< "my_tee. ! queue ! videoconvert ! autovideosink "
<< "my_tee. ! queue ! videoconvert ! appsink name=my_sink";
std::string my_pipeline = ss.str();
GstElement *pipeline = gst_parse_launch(my_pipeline.c_str(), nullptr);
/* get sink */
GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "my_sink");
gst_app_sink_set_emit_signals((GstAppSink*)sink, true);
gst_app_sink_set_drop((GstAppSink*)sink, true);
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample };
gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, NULL, NULL);
GstBus *bus;
guint bus_watch_id;
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL);
gst_object_unref (bus);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
// ------------ Pangolin ------------
// Create OpenGL window in single line
pangolin::CreateWindowAndBind("Main",640,480);
pangolin::CreatePanel("menu").SetBounds(0.0,1.0,0.0,pangolin::Attach::Pix(175));
// 3D Mouse handler requires depth testing to be enabled
glEnable(GL_DEPTH_TEST);
pangolin::OpenGlRenderState s_cam(
pangolin::ProjectionMatrix(640,480,420,420,320,240,0.1,1000),
pangolin::ModelViewLookAt(0,0,0, 0,0,1, pangolin::AxisNegY)
);
// Aspect ratio allows us to constrain width and height whilst fitting within specified
// bounds. A positive aspect ratio makes a view 'shrink to fit' (introducing empty bars),
// whilst a negative ratio makes the view 'grow to fit' (cropping the view).
pangolin::View& d_cam = pangolin::Display("cam")
.SetBounds(0,1.0f,pangolin::Attach::Pix(175),1.0f,-640/480.0)
.SetHandler(new pangolin::Handler3D(s_cam));
float fxForAR = 707.0912, fyForAR = 707.0912, cxForAR = 601.8873, cyForAR = 183.1104;
ar_3d_camera
.SetProjectionMatrix(pangolin::ProjectionMatrix((int)1226.0, (int)370, fxForAR, fyForAR,
cxForAR, cyForAR, 0.1, 1000))
.SetModelViewMatrix(pangolin::ModelViewLookAt(0, 0, 0, 0, 0, 1, pangolin::AxisNegY));
// Define the projection matrix
P = pangolin::ProjectionMatrix((int)1226.0, (int)370.0, fxForAR, fyForAR, cxForAR, cyForAR, 0.001, 10000);
pangolin::GlTexture imageTexture(1226,370,GL_RGBA,false,0,GL_RGBA,GL_UNSIGNED_BYTE);
// This view will take up no more than a third of the windows width or height, and it
// will have a fixed aspect ratio to match the image that it will display. When fitting
// within the specified bounds, push to the top-left (as specified by SetLock).
pangolin::View& d_image = pangolin::Display("image")
.SetBounds(0,pangolin::Attach::Pix(370), pangolin::Attach::Pix(175),pangolin::Attach::Pix(175 + 1226), 1226.0/370.0)
.SetHandler(new pangolin::Handler3D(ar_3d_camera))
.SetLock(pangolin::LockLeft, pangolin::LockTop);
d_image.extern_draw_function = draw_scene_no_camera;
// std::cout << "The second parameter of the arguments: " << argv[2] << " " << argv[1] << " " << argv[0] << std::endl;
#ifdef MY_GST_USE_OPENCV
auto lambda_1 = [&d_cam, &s_cam, &imageTexture, &d_image] () {
int counter = 0;
cv::Mat frame;
double ts;
Sophus::SE3f Twc;
while (true) {
if (use_gui) {
cv::namedWindow("preview", 1);
} else {
// cv::namedWindow("no preview", 1);
}
{
std::lock_guard<std::mutex> guard(g_mutex);
if (frameQueue.size() > 0) {
frame = frameQueue.front();
ts = timestampsQueue.front();
frameQueue.pop_front();
timestampsQueue.pop_front();
std::cout << "we have a frame to process..." << std::endl;
}
}
if (!frame.empty()){
cv::Rect leftROI(0, 0, frame.cols/2, frame.rows);
cv::Rect rightROI(frame.cols/2, 0, frame.cols, frame.rows);
cv::Mat imLeft = frame(leftROI);
std::printf("The Left Image width and height: %d %d", imLeft.cols, imLeft.rows);
cv::Mat imRight = frame(rightROI);
std::printf("The Right Image width and height: %d %d", imRight.cols, imRight.rows);
std::printf("The timestamp is: ", ts);
if ( TwcMap.find(ts) != TwcMap.end() ){
Twc = TwcMap[ts];
}
// Visualizing using Pangolin.
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
d_cam.Activate(s_cam);
glClearColor(1.0, 1.0, 1.0, 1.0);
glColor3f(1.0,1.0,1.0);
if (drawLineCubeBool){
drawLinesCube(xSkew, ySkew, zSkew, cubeSize);
std::cout << "DrawLinesCube after d_cam Activate success" << std::endl;
}
// For the 1st sequence mono-kitti
// if (showCube1) {drawLinesCube(0.0, 1.0, -95.0, cubeSize);}
// if (showCube2) {drawLinesCube(0.0, 1.5, -35.0, cubeSize);}
// if (showCube3) {drawLinesCube(5.0, 1.5, -40.0, cubeSize);}
// if (showCube4) {drawLinesCube(-32.0, 1.5, -85.0, cubeSize);}
// if (showCube5) {drawLinesCube(-32.0, 1.5, -95.0, cubeSize);}
// if (showCube6) {drawLinesCube(-64.0, 3.0, -95.0, cubeSize);}
// if (showCube7) {drawLinesCube(-64.0, 3.0, -85.0, cubeSize);}
// For the 6th sequence mono-kitti
if (showCube1) {drawLinesCube(5.15, 0.82, -33.81, cubeSize);}
if (showCube2) {drawLinesCube(0.7, 0.56, -11.5, cubeSize);}
if (showCube3) {drawLinesCube(5.15, 0.8, -95.0, cubeSize);}
if (showCube4) {drawLinesCube(4.6, 2.1, -130.3, cubeSize);}
// This ones' height can be wrong
if (showCube5) {drawLinesCube(4.1, 5.9, -346.5, cubeSize);}
if (showCube6) {drawLinesCube(25.7, 4.8, -346.5, cubeSize);}
// if (showCube7) {drawLinesCube(32.5, 3.8, -345.7, cubeSize);}
pangolin::glDrawAxis(Sophus::SE3d().matrix(), 1.0);
//display the image
d_image.Activate(ar_3d_camera);
glColor3f(1.0,1.0,1.0);
// Shows the image uploaded to the device (GPU or CPU) in the ViewPort.
cv::cvtColor(imLeft, imLeft, cv::COLOR_BGR2RGBA);
imageTexture.Upload(imLeft.data,GL_RGBA,GL_UNSIGNED_BYTE);
// std::cout << "Number of channels: " << imLeft.channels() << std::endl;
// s_cam.Follow(Twc);
if (drawTexture){
imageTexture.RenderToViewportFlipY();
}
if (saveRender){
d_image.SaveRenderNow(std::to_string(counter) + "_demo", saveRenderScale);
// ar_view.SaveRenderNow(mpFrameDrawer->getCurrImgFileName() + "_demo", saveRenderScale);
}
pangolin::FinishFrame();
}
counter += 1;
usleep(33);
if (use_gui) {
if (!frame.empty()) {
cv::Mat edges;
cvtColor(frame, edges, cv::COLOR_BGR2BGRA);
cv::imshow("preview", frame);
}
cv::waitKey(30);
}
// delete [] buffer;
}
};
auto lambda_2 = [&argc, &argv] () {
if( argc >= 2 && std::strcmp( argv[1], "-h" ) == 0 ){
std::cout << "usage: OscReceiveTest [port]\n";
return 0;
}
int port = 7000;
if( argc >= 2 ) {
// std::cout << "The second parameter of the arguments: " << argv[2] << " " << argv[1] << " " << argv[0] << std::endl;
port = std::atoi( argv[2] );
std::cout << "Detected the port input: " << port << std::endl;
}
// Listener to fill the TwcQueue
osc::RunReceiveTest( port );
};
std::thread t2(lambda_2);
std::cout << "Lambda 2 function works in the thread t2 ..." << std::endl;
std::thread t1(lambda_1);
std::cout << "Lambda 1 function works in the thread t1 ..." << std::endl;
bool is_terminated = false;
while (!is_terminated) {
// g_main_iteration(false);
g_main_context_iteration(NULL, false);
}
t2.join();
t1.join();
#else
bool is_terminated = false;
while (!is_terminated) {
g_main_context_iteration(NULL, false);
}
#endif // MY_GST_USE_OPENCV
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}