542 lines
23 KiB
C++
542 lines
23 KiB
C++
#include <gst/gst.h>
|
|
#include <gst/app/gstappsink.h>
|
|
#include <sstream>
|
|
//#include <format>
|
|
#include <gst/video/gstvideometa.h>
|
|
#include <cstdlib>
|
|
#include <deque>
|
|
#include <iostream>
|
|
#include <mutex>
|
|
#include <fstream>
|
|
|
|
#include "osc/OscOutboundPacketStream.h"
|
|
#include "ip/UdpSocket.h"
|
|
|
|
#include "System.h"
|
|
|
|
// CHANGE HERE THE PORT
|
|
#define ADDRESS "127.0.0.1"
|
|
#define PORT 7000
|
|
#define OUTPUT_BUFFER_SIZE 1024
|
|
|
|
#define MY_GST_USE_OPENCV
|
|
#ifdef MY_GST_USE_OPENCV
|
|
#include "opencv2/opencv.hpp"
|
|
// TODO: use synchronized deque
|
|
std::mutex g_mutex;
|
|
std::deque<cv::Mat> frameQueue;
|
|
std::deque<double> timestampsQueue;
|
|
#endif // MY_GST_USE_OPENCV
|
|
|
|
/// App-sink preroll callback: announces that the first (preroll) sample has
/// arrived. Always accepts it so the pipeline can continue to PLAYING.
GstFlowReturn new_preroll(GstAppSink *appsink, gpointer data) {
    (void) appsink;  // unused — we only log the event
    (void) data;     // unused
    g_print ("Got preroll!\n");
    return GST_FLOW_OK;
}
|
|
|
|
/// App-sink sample callback: pulls the next video frame, logs its buffer
/// fields and attached metas, extracts the NDI caption timestamp when one is
/// present, and pushes the (frame, timestamp) pair onto the shared queues for
/// the SLAM worker thread.
///
/// Fixes vs. the previous version:
///  * unqualified `string` (no `using namespace std;` in this file) — now
///    `std::string`, so the file actually compiles;
///  * caption data is not guaranteed NUL-terminated — construct the string
///    with the explicit size instead of `std::string(x)`;
///  * `find()` results are checked against npos and `std::stod` is guarded;
///  * the frame and its timestamp are pushed together under one lock, so the
///    two queues can never get out of step (previously the timestamp was only
///    pushed when a caption meta existed, while the frame always was);
///  * the long copy-pasted else-if chain (including a duplicated
///    GstRTPSourceMeta branch and nine dead "XXX" placeholders) is a table;
///  * the string returned by gst_caps_to_string() is freed (it was leaked).
GstFlowReturn new_sample(GstAppSink *appsink, gpointer data) {
    static int framecount = 0;
    framecount++;

    std::cout << "new frame " << framecount << std::endl;

    GstSample *sample = gst_app_sink_pull_sample(appsink);
    if (!sample) {
        // appsink is flushing or reached end-of-stream.
        return GST_FLOW_EOS;
    }
    GstCaps *caps = gst_sample_get_caps(sample);
    GstBuffer *buffer = gst_sample_get_buffer(sample);

    std::cout << "n_memory = " << gst_buffer_n_memory(buffer) << std::endl;
    std::cout << "buffer->pts = " << buffer->pts << std::endl;
    std::cout << "buffer->dts = " << buffer->dts << std::endl;
    std::cout << "buffer->duration = " << buffer->duration << std::endl;
    std::cout << "buffer->offset = " << buffer->offset << std::endl;
    std::cout << "buffer->offset_end = " << buffer->offset_end << std::endl;

    // Fallback timestamp (seconds) in case no caption meta carries one; this
    // guarantees frameQueue and timestampsQueue always grow in lock-step.
    double ts = GST_BUFFER_PTS_IS_VALID(buffer)
                    ? (double) GST_BUFFER_PTS(buffer) / GST_SECOND
                    : 0.0;

    // Meta types we only recognize (and log) by their registered name.
    static const char* const kNamedMetas[] = {
        "GstNdiSrcMeta", "GstNdiSinkAudioMeta", "GstVideoMeta",
        "GstVideoCropMeta", "GstFramePositionerMeta", "GstMetaDfbSurface",
        "GstSubtitleMeta", "GstRtmpMeta", "GstMpegVideoMeta",
        "GstSctpReceiveMeta", "GstSctpSendMeta", "GstCoreMediaMeta",
        "GstCoreVideoMeta", "GstAudioDownmixMeta", "GstAudioClippingMeta",
        "GstGLSyncMeta", "GstRTPSourceMeta", "GstVideoGLTextureUploadMeta",
        "GstVideoRegionOfInterestMeta", "GstVideoAFDMeta", "GstVideoBarMeta",
        "GstVideoMultiviewMeta", "GstVideoOverlayCompositionMeta",
        "GstMetaXImage", "GstProtectionMeta", "GstNetControlMessageMeta",
        "GstMetaTest", "GstNVMMParentMeta", "GstAudioMeta",
        "GstAudioLevelMeta", "GstVideoAffineTransformationMeta",
        "GstVideoCodecAlphaMeta", "StreamTimestamp",
    };

    GstMeta *gst_meta;
    gpointer state = nullptr;
    while ((gst_meta = gst_buffer_iterate_meta(buffer, &state))) {
        if (gst_meta->info == gst_video_caption_meta_get_info()) {
            auto specific_meta = (GstVideoCaptionMeta *) gst_meta;
            if (specific_meta) {
                auto x = (const char *) (specific_meta->data);
                std::cout << "MetaInfo is recognized to be [GstVideoCaptionMeta]"
                          << "caption = " << std::string(x, specific_meta->size)
                          << std::endl;

                // The caption is expected to contain "...timestamp: VALUE>...".
                // Extract VALUE (up to one char before the closing '>').
                std::string meta(x, specific_meta->size);
                const std::string marker = "timestamp: ";
                const std::size_t idx = meta.find(marker);
                const std::size_t end_idx = meta.find('>');
                if (idx != std::string::npos && end_idx != std::string::npos
                        && end_idx >= idx + marker.size() + 1) {
                    const std::string ts_str =
                        meta.substr(idx + marker.size(),
                                    end_idx - 1 - (idx + marker.size()));
                    try {
                        ts = std::stod(ts_str);
                    } catch (const std::exception&) {
                        std::cerr << "could not parse caption timestamp '"
                                  << ts_str << "'" << std::endl;
                    }
                }
            }
        } else if (gst_meta->info == gst_video_time_code_meta_get_info()) {
            auto specific_meta = (GstVideoTimeCodeMeta *) gst_meta;
            if (specific_meta) {
                std::cout << "MetaInfo is recognized to be [GstVideoTimeCodeMeta]"
                          << " h = " << specific_meta->tc.hours
                          << " m = " << specific_meta->tc.minutes
                          << " s = " << specific_meta->tc.seconds
                          << " f = " << specific_meta->tc.frames
                          << std::endl;
            }
        } else {
            bool recognized = false;
            for (const char* name : kNamedMetas) {
                // gst_meta_get_info() returns NULL for names that were never
                // registered, which can never equal a live meta's info pointer.
                if (gst_meta->info == gst_meta_get_info(name)) {
                    std::cout << "MetaInfo is recognized to be [" << name << "]"
                              << std::endl;
                    recognized = true;
                    break;
                }
            }
            if (!recognized) {
                std::cout << "GstMetaInfo is not recognized."
                          << " info = " << gst_meta->info
                          << " api = " << gst_meta->info->api
                          << std::endl;
            }
        }
    }

    // ---- Read frame and convert to opencv format ---------------

    GstMapInfo map;
    if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) {
        g_printerr ("failed to map buffer for reading\n");
        gst_sample_unref (sample);
        return GST_FLOW_ERROR;
    }

#ifdef MY_GST_USE_OPENCV
    // Resolve the real frame size from the caps; the literals are only a
    // last-resort fallback used when the caps lack width/height fields.
    int width = 2560;
    int height = 1440;
    GstStructure *s = gst_caps_get_structure(caps, 0);
    gst_structure_get_int (s, "width", &width);
    gst_structure_get_int (s, "height", &height);

    // Wraps map.data without copying; clone() below takes the deep copy that
    // outlives the unmap.
    cv::Mat frame(cv::Size(width, height), CV_8UC4, (char*)map.data, cv::Mat::AUTO_STEP);

    std::printf("The image width and height: %d %d", frame.cols, frame.rows);

    {
        std::lock_guard<std::mutex> guard(g_mutex);
        // Push the frame together with its timestamp so the consumer can pop
        // both queues in lock-step.
        frameQueue.push_back(frame.clone());
        timestampsQueue.push_back(ts);
    }
#endif

    gst_buffer_unmap(buffer, &map);

    // ------------------------------------------------------------

    // print dot every 30 frames
    if (framecount % 30 == 0) {
        g_print (".");
    }

    // show caps on first frame
    if (framecount == 1) {
        gchar *caps_str = gst_caps_to_string(caps);
        g_print ("%s\n", caps_str);
        g_free (caps_str);  // gst_caps_to_string() transfers ownership
    }

    gst_sample_unref (sample);
    return GST_FLOW_OK;
}
|
|
|
|
/// Bus watch: logs every bus message and prints details for error messages.
/// EOS and all other message types need no extra handling here. Returning
/// TRUE keeps the watch installed so we are notified of subsequent messages
/// (FALSE would remove the watch and stop further callbacks).
static gboolean my_bus_callback (GstBus *bus, GstMessage *message, gpointer data) {
    g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));

    if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
        GError *err = nullptr;
        gchar *debug = nullptr;

        gst_message_parse_error (message, &err, &debug);
        g_print ("Error: %s\n", err->message);
        g_error_free (err);
        g_free (debug);
    }

    return TRUE;
}
|
|
|
|
// Command-line options parsed via GOption in main(). The help-text strings
// below are printed verbatim by --help, so they are left exactly as written
// (two are in Russian: "average number of repeats N" and "beep when done").
static gint repeats = 2;           // -r / --repeats: repeat count (currently unused)

static gchar* ndi_name = nullptr;  // -n / --ndi-name: NDI source fed to ndisrc

static gint use_gui = 0;           // -g / --gui: nonzero => show OpenCV preview

static gboolean beep = FALSE;      // -b / --beep: flag (currently unused)

// GOption table; terminated by the { NULL } sentinel entry.
static GOptionEntry entries[] =
{
  { "repeats", 'r', 0, G_OPTION_ARG_INT, &repeats, "Среднее число повторений N", "N" },
  { "ndi-name", 'n', 0, G_OPTION_ARG_STRING, &ndi_name, "you can enter the string here (ndi-name)", "M" },
  { "gui", 'g', 0, G_OPTION_ARG_INT, &use_gui, "use gui", nullptr },
  { "beep", 'b', 0, G_OPTION_ARG_NONE, &beep, "Сигнал при выполнениии", NULL },
  { NULL }
};
|
|
|
|
int main (int argc, char *argv[]) {
|
|
std::cout << "argc = " << argc << std::endl;
|
|
GError *error = nullptr;
|
|
GOptionContext *context;
|
|
|
|
char** argv_gst;
|
|
int argc_gst = 2;
|
|
argv_gst = new char* [2];
|
|
argv_gst[0] = new char[200];
|
|
argv_gst[1] = new char[200];
|
|
|
|
strcpy(argv_gst[0], argv[0]);
|
|
strcpy(argv_gst[1], argv[3]);
|
|
|
|
context = g_option_context_new("- test tree model performance");
|
|
g_option_context_add_main_entries(context, entries, "bla");
|
|
g_option_context_parse(context, &argc_gst, &argv_gst, &error);
|
|
// g_option_context_parse(context, &argc, &argv, &error);
|
|
|
|
|
|
if (!ndi_name) {
|
|
std::cout << "ndi-name is not provided" << std::endl;
|
|
// ndi_name = (char*)malloc(sizeof(char) * 100);
|
|
ndi_name = "DESKTOP-O5PNOBN (Test Pattern)";
|
|
std::cout << "ndi-name (default) = '" << ndi_name << "'" << std::endl;
|
|
} else {
|
|
std::cout << "ndi-name = '" << ndi_name << "'" << std::endl;
|
|
}
|
|
|
|
GstStateChangeReturn ret;
|
|
|
|
int fake_argc = 1;
|
|
// gst_init (&fake_argc, &argv);
|
|
gst_init (&fake_argc, &argv);
|
|
|
|
/*
|
|
s_pipeline = f'ndisrc ndi-name="{ndi_name}" ! ndisrcdemux name=demux ' \
|
|
f'demux.video ! queue ! tee name=my_tee ' \
|
|
f'my_tee. ! queue ! videoconvert ! autovideosink ' \
|
|
f'my_tee. ! queue ! videoconvert ! appsink name=my_sink'
|
|
*/
|
|
std::stringstream ss;
|
|
ss << "ndisrc ndi-name=\"" << ndi_name << "\" ! ndisrcdemux name=demux "
|
|
<< "demux.video ! queue ! tee name=my_tee "
|
|
<< "my_tee. ! queue ! videoconvert ! autovideosink "
|
|
<< "my_tee. ! queue ! videoconvert ! appsink name=my_sink";
|
|
std::string my_pipeline = ss.str();
|
|
|
|
// GstElement *my_src = nullptr;
|
|
// my_src = gst_element_factory_make ("ndisrc", "my_ndi_src");
|
|
// std::cout << my_src << std::endl;
|
|
// if (!my_src) {
|
|
// std::cerr << "Can't create ndivideosrc" << std::endl;
|
|
// return -1;
|
|
// }
|
|
// std::cerr << "ndivideosrc is created with success!" << std::endl;
|
|
|
|
// GstElement *my_sink = nullptr;
|
|
// my_sink = gst_element_factory_make ("appsink", "my_sink");
|
|
// std::cout << my_sink << std::endl;
|
|
// if (!my_sink) {
|
|
// std::cerr << "Can't create appsink" << std::endl;
|
|
// return -1;
|
|
// }
|
|
// std::cerr << "appsink is created with success!" << std::endl;
|
|
|
|
GstElement *pipeline = gst_parse_launch(my_pipeline.c_str(), nullptr);
|
|
|
|
// GstElement *pipeline = nullptr;
|
|
// pipeline = gst_pipeline_new ("test-pipeline");
|
|
// std::cout << my_sink << std::endl;
|
|
// if (!pipeline) {
|
|
// std::cerr << "Can't create pipeline" << std::endl;
|
|
// return -1;
|
|
// }
|
|
// std::cerr << "pipeline is created with success!" << std::endl;
|
|
|
|
// gst_bin_add_many (GST_BIN (pipeline), my_src, my_sink, NULL);
|
|
// std::cout << "here!" << std::endl;
|
|
// if (!gst_element_link_many (my_src, my_sink, NULL)) {
|
|
// g_printerr ("Elements could not be linked.\n");
|
|
// gst_object_unref (pipeline);
|
|
// return -1;
|
|
// }
|
|
// std::cout << "here -2 !" << std::endl;
|
|
|
|
// g_object_set (my_src, "ndi-name", "ADMIN (lafvi 29.97fps)", NULL);
|
|
/*
|
|
[libndi_newtek @ 0x556ab959f400] Found 4 NDI sources:
|
|
[libndi_newtek @ 0x556ab959f400] 'DESKTOP-O5PNOBN (CameraVegasAR)' '185.41.112.227:5962'
|
|
[libndi_newtek @ 0x556ab959f400] 'DESKTOP-O5PNOBN (NVIDIA Quadro RTX 5000 1)' '185.41.112.227:5961'
|
|
[libndi_newtek @ 0x556ab959f400] 'DESKTOP-O5PNOBN (NVIDIA Quadro RTX 5000 2)' '185.41.112.227:5963'
|
|
[libndi_newtek @ 0x556ab959f400] 'DESKTOP-O5PNOBN (Test Pattern)' '185.41.112.227:5964'
|
|
*/
|
|
// g_object_set (my_src, "ndi-name", ndi_name, NULL);
|
|
|
|
/* get sink */
|
|
GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "my_sink");
|
|
|
|
gst_app_sink_set_emit_signals((GstAppSink*)sink, true);
|
|
gst_app_sink_set_drop((GstAppSink*)sink, true);
|
|
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
|
|
GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample };
|
|
gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, NULL, NULL);
|
|
|
|
GstBus *bus;
|
|
guint bus_watch_id;
|
|
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
|
|
bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL);
|
|
gst_object_unref (bus);
|
|
|
|
/* Start playing */
|
|
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
|
|
if (ret == GST_STATE_CHANGE_FAILURE) {
|
|
g_printerr ("Unable to set the pipeline to the playing state.\n");
|
|
gst_object_unref (pipeline);
|
|
return -1;
|
|
}
|
|
|
|
#ifdef MY_GST_USE_OPENCV
|
|
auto lambda_1 = [] (char** argv) {
|
|
std::cout << "The path to settings: " << argv[2] << std::endl;
|
|
std::cout << "The path to vocabulary: " << argv[1] << std::endl;
|
|
ORB_SLAM3::System SLAM(argv[1], argv[2], ORB_SLAM3::System::STEREO, false);
|
|
std::printf("SLAM system initialized \n");
|
|
|
|
while (true) {
|
|
if (use_gui) {
|
|
cv::namedWindow("preview", 1);
|
|
} else {
|
|
// cv::namedWindow("no preview", 1);
|
|
}
|
|
cv::Mat frame;
|
|
double ts;
|
|
|
|
// char * buffer = nullptr;
|
|
// {
|
|
// int length;
|
|
// std::cout << "trying to open example.bin" << std::endl;
|
|
// std::ifstream is;
|
|
// is.open("example.bin", std::ios::binary);
|
|
// is.seekg(0, std::ios::end);
|
|
// length = is.tellg();
|
|
// is.seekg(0, std::ios::beg);
|
|
// buffer = new char [length];
|
|
// is.read(buffer, length);
|
|
// is.close();
|
|
// frame = cv::Mat(cv::Size(1920, 1080), CV_8UC4, (char*)buffer, cv::Mat::AUTO_STEP);
|
|
// }
|
|
{
|
|
std::lock_guard<std::mutex> guard(g_mutex);
|
|
if (frameQueue.size() > 0) {
|
|
frame = frameQueue.front();
|
|
ts = timestampsQueue.front();
|
|
frameQueue.pop_front();
|
|
timestampsQueue.pop_front();
|
|
std::cout << "we have a frame to process..." << std::endl;
|
|
}
|
|
}
|
|
if (!frame.empty()){
|
|
cv::Rect leftROI(0, 0, frame.cols/2, frame.rows);
|
|
cv::Rect rightROI(frame.cols/2, 0, frame.cols, frame.rows);
|
|
cv::Mat imLeft = frame(leftROI);
|
|
std::printf("The Left Image width and height: %d %d", imLeft.cols, imLeft.rows);
|
|
cv::Mat imRight = frame(rightROI);
|
|
std::printf("The Right Image width and height: %d %d", imRight.cols, imRight.rows);
|
|
std::printf("The timestamp is: ", ts);
|
|
Sophus::SE3f Tcw = SLAM.TrackStereo(imLeft, imRight, ts);
|
|
Sophus::SE3f Twc = Tcw.inverse();
|
|
Eigen::Matrix<float, 3, 3> Rwc = Twc.rotationMatrix();
|
|
Eigen::Matrix<float, 3, 1> twc = Twc.translation();
|
|
// TODO: Send the Twc matrix with the saving format you've seen in System.h to the Visualizer.
|
|
// TODO: add the loading of Atlas
|
|
// TODO: implement according to the checklist.
|
|
|
|
UdpTransmitSocket transmitSocket(IpEndpointName(ADDRESS, PORT));
|
|
char buffer[OUTPUT_BUFFER_SIZE];
|
|
osc::OutboundPacketStream p(buffer, OUTPUT_BUFFER_SIZE);
|
|
|
|
std::stringstream ss;
|
|
ss << setprecision(9) << ts << Rwc(0,0) << " " << Rwc(0,1) << " " << Rwc(0,2) << " " << twc(0) << " " <<
|
|
Rwc(1,0) << " " << Rwc(1,1) << " " << Rwc(1,2) << " " << twc(1) << " " <<
|
|
Rwc(2,0) << " " << Rwc(2,1) << " " << Rwc(2,2) << " " << twc(2) << endl;
|
|
char msg[100];
|
|
strcpy(msg, ss.str().c_str());
|
|
|
|
p << osc::BeginBundleImmediate
|
|
<< osc::BeginMessage("/test3") << msg << osc::EndMessage << osc::EndBundle;
|
|
|
|
transmitSocket.Send(p.Data(), p.Size());
|
|
std::cout << "Message sent!" << std::endl;
|
|
|
|
if (use_gui) {
|
|
if (!frame.empty()) {
|
|
cv::Mat edges;
|
|
cvtColor(frame, edges, cv::COLOR_BGR2BGRA);
|
|
cv::imshow("preview", frame);
|
|
}
|
|
cv::waitKey(30);
|
|
}
|
|
// delete [] buffer;
|
|
}
|
|
}
|
|
};
|
|
|
|
std::thread t1(lambda_1, argv);
|
|
|
|
|
|
bool is_terminated = false;
|
|
while (!is_terminated) {
|
|
// g_main_iteration(false);
|
|
g_main_context_iteration(NULL, false);
|
|
}
|
|
t1.join();
|
|
#else
|
|
bool is_terminated = false;
|
|
while (!is_terminated) {
|
|
g_main_context_iteration(NULL, false);
|
|
}
|
|
#endif // MY_GST_USE_OPENCV
|
|
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
|
|
gst_object_unref (GST_OBJECT (pipeline));
|
|
|
|
return 0;
|
|
}
|