Files
cv_networking_pipeline/gst_get_ndi_v6_7.cpp
PodmogilnyjIvan b2a58ddcbf committing changes
2022-01-21 05:30:16 -08:00

740 lines
29 KiB
C++

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
//#include <gst/
#include <sstream>
//#include <format>
#include <gst/video/gstvideometa.h>
#include <cstdlib>
#include <deque>
#include <iostream>
#include <thread>
#include <mutex>
#include <fstream>
#include <chrono>
#include "osc/OscOutboundPacketStream.h"
#include "ip/UdpSocket.h"
#include "processing_functions.h"
#include <Converter.h>
#include "System.h"
using namespace std::chrono;
// CHANGE HERE TO MAKE PORT
#define ADDRESS "127.0.0.1"
#define PORT 7000
#define OUTPUT_BUFFER_SIZE 1024
#define MY_GST_USE_OPENCV
#ifdef MY_GST_USE_OPENCV
#include "opencv2/opencv.hpp"
// TODO: use synchronized deque
std::mutex g_mutex;
std::deque<cv::Mat> frameQueue;
#endif // MY_GST_USE_OPENCV
cv::Mat& extract_rot(cv::Mat& rot, const cv::Mat& trans) {
// cv::Mat rot(3, 3, CV_32F, 0.0);
for (int row = 0; row < 3; ++row) {
for (int col = 0; col < 3; ++col) {
rot.at<float>(row, col) = trans.at<float>(row, col);
}
}
return rot;
}
pair<float, vector<float>> extract_deg(const cv::Mat& rotation_matrix) {
// TODO: extract_deg is producing negative values. Fix it.
float degrees;
vector<float> myvec = { rotation_matrix.at<float>(1, 2) - rotation_matrix.at<float>(2,1), rotation_matrix.at<float>(2, 0) - rotation_matrix.at<float>(0, 2), rotation_matrix.at<float>(0, 1) - rotation_matrix.at<float>(1, 0) };
float trace;
trace = rotation_matrix.at<float>(0, 0) + rotation_matrix.at<float>(1, 1) + rotation_matrix.at<float>(2, 2);
// cout << "a11 is: " << rotation_matrix.at<float>(0, 0) << " a22 is: " << rotation_matrix.at<float>(1, 1) << " a33 is: " << rotation_matrix.at<float>(2, 2) << endl;
// cout << "x is: " << (trace - 1) / 2 << endl;
degrees = acos((trace - 1) / 2);
// cout << "Calc degrees (from function) is: " << degrees << endl;
pair<float, vector<float>> result = { degrees, myvec };
return result;
}
vector<float> find_mode(const vector<vector<float>>& vec_of_rot_axes) {
cout << "Hello! This is find_mode() function" << endl;
int index = 0, counter = 0, max_counted = 0;
vector<float> el;
for (int i = 0; i < vec_of_rot_axes.size(); i++) {
el = vec_of_rot_axes[i];
cout << "Extracted el is: ";
for (auto e : el) {
cout << " " << e << " ";
}
cout << endl;
for (const auto& vec_of_rot_axe : vec_of_rot_axes) {
if (el == vec_of_rot_axe) {
cout << "Entered if (el == vec_of_rot_axe) statement" << endl;
counter += 1;
}
}
if (counter > max_counted) {
// cout << "Found new max element. Index is: " << index << "; i is: " << i << endl;
index = i;
max_counted = counter;
}
}
cout << "Index is: " << index << "; And arr size is: " << vec_of_rot_axes.size() << endl;
return vec_of_rot_axes[index];
}
void send_euler_to_receiver_osc(vector<float> euler_angles, int counter = -1) {
// Euler angles are recieved in Radians.
//euler_angles[0] *= 57.29;
//euler_angles[1] *= 57.29;
//euler_angles[2] *= 57.29;
std::string str;
char msg[40];
UdpTransmitSocket transmitSocket(IpEndpointName(ADDRESS, PORT));
char buffer[OUTPUT_BUFFER_SIZE];
osc::OutboundPacketStream p(buffer, OUTPUT_BUFFER_SIZE);
if (counter == -1) {
str = std::to_string(euler_angles[0]) + " " + std::to_string(euler_angles[1]) + " " + std::to_string(euler_angles[2]);
}
else {
str = std::to_string(euler_angles[0]) + " " + std::to_string(euler_angles[1]) + " " + std::to_string(euler_angles[2]) + " " + std::to_string(counter);
}
strcpy(msg, str.c_str());
p << osc::BeginBundleImmediate << osc::BeginMessage("/test3") << msg << osc::EndMessage << osc::EndBundle;
transmitSocket.Send(p.Data(), p.Size());
memset(msg, 0, 40);
};
cv::Mat axisVector2Rot(float theta, vector<float> v) {
cv::Mat Rot(3, 3, CV_32F, 0.0);
float c = cos(theta);
float s = sin(theta);
float t = 1 - c;
Rot.at<float>(0, 0) = t * v[0] * v[0] + c;
Rot.at<float>(0, 1) = t * v[0] * v[1] - v[2] * s;
Rot.at<float>(0, 2) = t * v[0] * v[2] + v[1] * c;
Rot.at<float>(1, 0) = t * v[0] * v[1] + v[2] * s;
Rot.at<float>(1, 1) = t * v[1] * v[1] + c;
Rot.at<float>(1, 2) = t * v[1] * v[2] - v[0] * s;
Rot.at<float>(2, 0) = t * v[0] * v[2] - v[1] * s;
Rot.at<float>(2, 1) = t * v[1] * v[2] + v[0] * s;
Rot.at<float>(2, 2) = t * v[2] * v[2] + c;
return Rot;
};
vector<float> axisVector2Euler(float theta, vector<float> axis) {
vector<float> euler_angles;
cv::Mat Rot = axisVector2Rot(theta, axis);
euler_angles = ORB_SLAM3::Converter::toEuler(Rot);
return euler_angles;
};
GstFlowReturn new_preroll(GstAppSink *appsink, gpointer data) {
g_print ("Got preroll!\n");
return GST_FLOW_OK;
}
GstFlowReturn new_sample(GstAppSink *appsink, gpointer data) {
static int framecount = 0;
framecount++;
static long long ms = 0;
auto new_ms = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()).count();
std::cout << "nnew frame " << (new_ms - ms) << " " << framecount << std::endl;
ms = new_ms;
GstSample *sample = gst_app_sink_pull_sample(appsink);
GstCaps *caps = gst_sample_get_caps(sample);
GstBuffer *buffer = gst_sample_get_buffer(sample);
const auto& n_memory = gst_buffer_n_memory(buffer);
std::cout << "n_memory = " << n_memory << std::endl;
std::cout << "buffer->pts = " << buffer->pts << std::endl;
std::cout << "buffer->dts = " << buffer->dts << std::endl;
std::cout << "buffer->duration = " << buffer->duration << std::endl;
std::cout << "buffer->offset = " << buffer->offset << std::endl;
std::cout << "buffer->offset_end = " << buffer->offset_end << std::endl;
const GstStructure *info = gst_sample_get_info(sample);
GstMeta *gst_meta;
gpointer state = nullptr;
while ((gst_meta = gst_buffer_iterate_meta(buffer, &state))) {
if (gst_meta->info == gst_video_caption_meta_get_info()) {
auto specific_meta = (GstVideoCaptionMeta *) gst_meta;
if (specific_meta) {
auto x = (const char *) (specific_meta->data);
std::cout << "MetaInfo is recognized to be [GstVideoCaptionMeta]"
<< "caption = " << std::string(x, specific_meta->size)
<< std::endl;
}
} else if (gst_meta->info == gst_video_time_code_meta_get_info()) {
auto specific_meta = (GstVideoTimeCodeMeta *) gst_meta;
if (specific_meta) {
std::cout << "MetaInfo is recognized to be [GstVideoTimeCodeMeta]"
<< " h = " << specific_meta->tc.hours
<< " m = " << specific_meta->tc.minutes
<< " s = " << specific_meta->tc.seconds
<< " f = " << specific_meta->tc.frames
<< std::endl;
}
} else if (gst_meta->info == gst_meta_get_info("GstNdiSrcMeta")) {
std::cout << "MetaInfo is recognized to be [GstNdiSrcMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstNdiSinkAudioMeta")) {
std::cout << "MetaInfo is recognized to be [GstNdiSinkAudioMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoCropMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoCropMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstFramePositionerMeta")) {
std::cout << "MetaInfo is recognized to be [GstFramePositionerMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstMetaDfbSurface")) {
std::cout << "MetaInfo is recognized to be [GstMetaDfbSurface]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstSubtitleMeta")) {
std::cout << "MetaInfo is recognized to be [GstSubtitleMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstRtmpMeta")) {
std::cout << "MetaInfo is recognized to be [GstRtmpMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstMpegVideoMeta")) {
std::cout << "MetaInfo is recognized to be [GstMpegVideoMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstSctpReceiveMeta")) {
std::cout << "MetaInfo is recognized to be [GstSctpReceiveMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstSctpSendMeta")) {
std::cout << "MetaInfo is recognized to be [GstSctpSendMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstCoreMediaMeta")) {
std::cout << "MetaInfo is recognized to be [GstCoreMediaMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstCoreVideoMeta")) {
std::cout << "MetaInfo is recognized to be [GstCoreVideoMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstAudioDownmixMeta")) {
std::cout << "MetaInfo is recognized to be [GstAudioDownmixMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstAudioClippingMeta")) {
std::cout << "MetaInfo is recognized to be [GstAudioClippingMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstGLSyncMeta")) {
std::cout << "MetaInfo is recognized to be [GstGLSyncMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstRTPSourceMeta")) {
std::cout << "MetaInfo is recognized to be [GstRTPSourceMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstRTPSourceMeta")) {
std::cout << "MetaInfo is recognized to be [GstRTPSourceMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoGLTextureUploadMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoGLTextureUploadMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoRegionOfInterestMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoRegionOfInterestMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoAFDMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoAFDMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoBarMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoBarMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoMultiviewMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoMultiviewMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoOverlayCompositionMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoOverlayCompositionMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstMetaXImage")) {
std::cout << "MetaInfo is recognized to be [GstMetaXImage]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstProtectionMeta")) {
std::cout << "MetaInfo is recognized to be [GstProtectionMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstNetControlMessageMeta")) {
std::cout << "MetaInfo is recognized to be [GstNetControlMessageMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstMetaTest")) {
std::cout << "MetaInfo is recognized to be [GstMetaTest]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstNVMMParentMeta")) {
std::cout << "MetaInfo is recognized to be [GstNVMMParentMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstAudioMeta")) {
std::cout << "MetaInfo is recognized to be [GstAudioMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstAudioLevelMeta")) {
std::cout << "MetaInfo is recognized to be [GstAudioLevelMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoAffineTransformationMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoAffineTransformationMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("GstVideoCodecAlphaMeta")) {
std::cout << "MetaInfo is recognized to be [GstVideoCodecAlphaMeta]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else if (gst_meta->info == gst_meta_get_info("XXX")) {
std::cout << "MetaInfo is recognized to be [XXX]"
<< std::endl;
} else {
std::cout << "GstMetaInfo is not recognized."
<< " info = " << gst_meta->info
<< " api = " << gst_meta->info->api
<< std::endl;
}
}
// ---- Read frame and convert to opencv format ---------------
// return GST_FLOW_OK;
GstMapInfo map;
gst_buffer_map (buffer, &map, GST_MAP_READ);
#ifdef MY_GST_USE_OPENCV
// convert gstreamer data to OpenCV Mat, you could actually
// resolve height / width from caps...
int width = 2560;
int height = 1440;
int depth = 4;
int bpp = -1;
GstStructure *s = gst_caps_get_structure(caps, 0);
gboolean res = true;
res &= gst_structure_get_int (s, "width", &width);
res &= gst_structure_get_int (s, "height", &height);
// res &= gst_structure_get_int (s, "depth", &depth);
// res &= gst_structure_get_int (s, "bpp", &bpp);
if (gst_structure_get_field_type (s, "format") == G_TYPE_STRING) {
const char *string;
string = gst_structure_get_string (s, "format");
// std::cout << "flksjlskfjsjdlkf" << string << std::endl;
// fourcc = GST_STR_FOURCC (string);
// } else if (gst_structure_get_field_type (s, "format") == GST_TYPE_FOURCC) {
// gst_structure_get_fourcc (s, "format", &fourcc);
} else {
// fourcc = 0;
}
assert (depth == 4);
cv::Mat frame(cv::Size(width, height), CV_8UC4, (char*)map.data, cv::Mat::AUTO_STEP);
// int frameSize = map.size;
std::cout << "size from caps = (" << width << "," << height << "," << depth << "," << bpp << ")" << "res =" << res
<< " total size = " << map.size
<< std::endl;
// if (res) {
// std::fstream file("example.bin", std::ios::out | std::ios::binary | std::ios::app);
// file.write((char*)map.data, map.size);
// file.close();
// }
// throw 1;
{
std::lock_guard<std::mutex> guard(g_mutex);
frameQueue.push_back(frame.clone());
}
#endif
gst_buffer_unmap(buffer, &map);
// ------------------------------------------------------------
// print dot every 30 frames
if (framecount%30 == 0) {
g_print (".");
}
// show caps on first frame
if (framecount == 1) {
g_print ("%s\n", gst_caps_to_string(caps));
}
gst_sample_unref (sample);
return GST_FLOW_OK;
}
static gboolean my_bus_callback (GstBus *bus, GstMessage *message, gpointer data) {
g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gst_message_parse_error (message, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_free (debug);
break;
}
case GST_MESSAGE_EOS: {
/* end-of-stream */
break;
} default: {
/* unhandled message */
break;
}
}
/* we want to be notified again the next time there is a message
* on the bus, so returning TRUE (FALSE means we want to stop watching
* for messages on the bus and our callback should not be called again)
*/
return TRUE;
}
static gboolean overrun_callback (GstElement * queue, gpointer udata) {
std::cout << "hi from overrun" << std::endl;
return TRUE;
}
static gchar* ndi_name = nullptr;
static gint use_opencv_preview = 0;
static gboolean use_gst_preview = FALSE;
static GOptionEntry entries[] =
{
{ "ndi-name", 0, 0, G_OPTION_ARG_STRING, &ndi_name, "you can enter the string here (ndi-name)", "M" },
{ "opencv-preview", 0, 0, G_OPTION_ARG_INT, &use_opencv_preview, "use opencv preview", NULL },
{ "gst-preview", 0, 0, G_OPTION_ARG_INT, &use_gst_preview, "use gstreamer preview", NULL },
{ NULL }
};
int main (int argc, char *argv[]) {
std::cout << "argc = " << argc << std::endl;
//for (int i = 0; i < argc; i++) {
// std::cout << argv[i] << std::endl;
//}
GError *error = nullptr;
GOptionContext *context;
context = g_option_context_new("- test tree model performance");
g_option_context_add_main_entries(context, entries, "bla");
char** argv_gst;
argv_gst = new char* [4];
argv_gst[0] = new char[200];
argv_gst[1] = new char[200];
argv_gst[2] = new char[200];
argv_gst[3] = new char[200];
std::strcpy(argv_gst[0], argv[0]);
std::strcpy(argv_gst[1], argv[1]);
std::strcpy(argv_gst[2], argv[2]);
std::strcpy(argv_gst[3], argv[3]);
for (int i = 0; i < argc-3; i++) {
std::cout << argv_gst[i] << std::endl;
}
int argc_gst = argc - 3;
// QUESTION 1.
g_option_context_parse(context, &argc_gst, &argv_gst, &error);
//g_option_context_parse(context, &argc, &argv, &error);
if (!ndi_name) {
std::cout << "ndi-name is not provided" << std::endl;
// ndi_name = (char*)malloc(sizeof(char) * 100);
ndi_name = "DESKTOP-O5PNOBN (Test Pattern)";
std::cout << "ndi-name (default) = '" << ndi_name << "'" << std::endl;
} else {
std::cout << "ndi-name = '" << ndi_name << "'" << std::endl;
}
GstStateChangeReturn ret;
int fake_argc = 1;
gst_init (&fake_argc, &argv);
/*
s_pipeline = f'ndisrc ndi-name="{ndi_name}" ! ndisrcdemux name=demux ' \
f'demux.video ! queue ! tee name=my_tee ' \
f'my_tee. ! queue ! videoconvert ! autovideosink ' \
f'my_tee. ! queue ! videoconvert ! appsink name=my_sink'
*/
auto sink_caps = "video/x-raw, format=(string)BGRA";
std::stringstream ss;
ss << "ndisrc ndi-name=\"" << ndi_name << "\" ! ndisrcdemux name=demux "
<< "demux.video ! queue ! tee name=my_tee ";
if (use_gst_preview) {
ss << "my_tee. ! queue name=q_show ! videoconvert ! autovideosink ";
}
ss << "my_tee. ! queue name=q_appsink ! videoconvert name=convert_slow ! appsink name=my_sink caps=\"" << sink_caps << "\"";
// ss << "my_tee. ! queue name=q_appsink ! glupload ! glcolorconvert ! \"video/x-raw(memory:GLMemory),format=BGR\" ! appsink name=my_sink ";
std::string my_pipeline = ss.str();
std::cout << "We are about to launch the pipeline = [" << my_pipeline.c_str() << "]" << std::endl;
GstElement *pipeline = gst_parse_launch(my_pipeline.c_str(), nullptr);
std::cout << "Launching pipeline success" << std::endl;
// g_object_set (my_src, "ndi-name", "ADMIN (lafvi 29.97fps)", NULL);
/*
[libndi_newtek @ 0x556ab959f400] Found 4 NDI sources:
[libndi_newtek @ 0x556ab959f400] 'DESKTOP-O5PNOBN (CameraVegasAR)' '185.41.112.227:5962'
[libndi_newtek @ 0x556ab959f400] 'DESKTOP-O5PNOBN (NVIDIA Quadro RTX 5000 1)' '185.41.112.227:5961'
[libndi_newtek @ 0x556ab959f400] 'DESKTOP-O5PNOBN (NVIDIA Quadro RTX 5000 2)' '185.41.112.227:5963'
[libndi_newtek @ 0x556ab959f400] 'DESKTOP-O5PNOBN (Test Pattern)' '185.41.112.227:5964'
*/
// g_object_set (my_src, "ndi-name", ndi_name, NULL);
/* get sink */
GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "my_sink");
gst_app_sink_set_emit_signals((GstAppSink*)sink, true);
gst_app_sink_set_drop((GstAppSink*)sink, true);
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample };
gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, NULL, NULL);
std::cout << "Launching sink success" << std::endl;
GstBus *bus;
guint bus_watch_id;
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL);
gst_object_unref (bus);
std::cout << "Launching bus success" << std::endl;
{
GstElement *e = gst_bin_get_by_name(GST_BIN (pipeline), "q_appsink");
g_signal_connect (e, "overrun", G_CALLBACK(overrun_callback), NULL);
}
{
GstElement *e = gst_bin_get_by_name(GST_BIN (pipeline), "convert_slow");
g_object_set(e, "n-threads", 1, nullptr);
}
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
std::cout << "Start playing success" << std::endl;
#ifdef MY_GST_USE_OPENCV
bool pangolin_window;
std::cout << argv[6] << std::endl;
string pangolin_choice(argv[6]);
if (pangolin_choice == "--orb_slam_window=1") {
pangolin_window = true;
}
else {
pangolin_window = false;
}
std::cout << "cout success" << std::endl;
std::cout << pangolin_window << std::endl;
auto lambda_1 = [] (char** argv, bool pangolin_preview) {
// --------------------------------- SLAM SYSTEM VARIABLES ---------------------------------
// Create SLAM system. It initializes all system threads and gets ready to process frames.
ORB_SLAM3::System SLAM(argv[1], argv[2], ORB_SLAM3::System::MONOCULAR, pangolin_preview);
std::printf("SLAM system initialized\n");
cv::Mat frame;
int cnt = 0;
int ts = 0;
float time_step = 1.0;
char matrix_name[100] = "ORB_SLAM3 matrix";
ofstream frames_log;
frames_log.open("lost_log.txt");
if (!frames_log) {
cerr << "Error; File could not be opened";
exit(1);
}
bool lost_flag = 0;
float initial_skew = atof(argv[3]);
std::cout << initial_skew << std::endl;
vector<int> all_maps_id = { 0 };
vector<float> euler_prev = { 0.0, 0.0, 0.0 }, euler_now = { 0.0, 0.0, 0.0 }, skew_angle = { 0.0, 0.0, 0.0 };
int prevID, currID;
// Processing lost of the frames. Just substituting with the average velocity
vector<vector<float>> recent_values;
int recent_values_desired_length = 15;
vector<float> avg_velocity = { 0.0, 0.0, 0.0 };
while (true) {
cv::Mat frame;
char* buffer = nullptr;
// EXTRACTING FRAME HERE.
{
std::lock_guard<std::mutex> guard(g_mutex);
if (frameQueue.size() > 0) {
frame = frameQueue.front();
frameQueue.pop_front();
//std::cout << "we have a frame to process..." << std::endl;
}
}
if (!frame.empty()) {
cvtColor(frame, frame, cv::COLOR_RGBA2RGB);
cv::Mat Tcw = SLAM.TrackMonocular(frame, ts, vector<ORB_SLAM3::IMU::Point>(), "");
cv::Mat Rot(3, 3, CV_32F, 0.0);
if (!Tcw.empty()) {
lost_flag = 0;
sprintf(matrix_name, "matrix%d", cnt);
//file << matrix_name << Tcw;
cv::Mat Rot(3, 3, CV_32F, 1);
extract_rot(Rot, Tcw);
auto euler = ORB_SLAM3::Converter::toEuler(Rot);
euler = euler * 57.29f;
euler_now = -euler;
cout << euler_now[0] << " " << euler_now[1] << " " << euler_now[2] << " " << endl;
currID = SLAM.GetCurID();
process_euler(euler_prev, euler_now, skew_angle, all_maps_id, prevID, currID, avg_velocity);
fill_recent_values(euler_now, recent_values, recent_values_desired_length);
frames_log << euler_now[0] << " " << euler_now[1] + initial_skew << " " << euler_now[2] << " " << cnt \
<< " " << SLAM.GetCurID() << " " << lost_flag << endl;
cout << euler_now[0] << " " << euler_now[1] + initial_skew << " " << euler_now[2] << " " << cnt << endl;
cout << "Map ID right now: " << SLAM.GetCurID() << endl;
cout << endl;
send_euler_to_receiver_osc(euler_now + initial_skew, cnt);
}
else {
cout << "Tcw is empty. Processing lost values." << endl;
lost_flag = 1;
// process_lost_euler(euler_prev, euler_now, recent_values_desired_length, recent_values, avg_velocity);
euler_now = { 0.0, 0.0, 0.0 };
// frames_log << euler_now[0] << " " << euler_now[1] - initial_skew << " " << euler_now[2] << " " << cnt << " " \
// << SLAM.GetCurrID() << " " << lost_flag << endl;
frames_log << euler_now[0] << " " << euler_now[1] << " " << euler_now[2] << " " << cnt << " " \
<< SLAM.GetCurID() << " " << lost_flag << endl;
// cout << euler_now[0] << " " << euler_now[1] - initial_skew << " " << euler_now[2] << " " << cnt << endl;
cout << euler_now[0] << " " << euler_now[1] << " " << euler_now[2] << " " << cnt << endl;
send_euler_to_receiver_osc(euler_now + initial_skew, cnt);
}
cnt++;
ts += time_step;
}
if (use_opencv_preview) {
if (!frame.empty()) {
cv::namedWindow("preview", 1);
cv::Mat edges;
cvtColor(frame, edges, cv::COLOR_BGR2BGRA);
cv::imshow("preview", frame);
}
cv::waitKey(30);
//cv::destroyAllWindows();
}
delete[] buffer;
}
printf("End of video\n");
// Stop all threads
SLAM.Shutdown();
printf("Done.\n");
};
char** argv_orb;
argv_orb = new char* [4];
argv_orb[0] = new char[300];
argv_orb[1] = new char[300];
argv_orb[2] = new char[300];
argv_orb[3] = new char[300];
std::strcpy(argv_orb[0], argv[0]);
std::strcpy(argv_orb[1], argv[4]);
std::strcpy(argv_orb[2], argv[5]);
std::strcpy(argv_orb[3], argv[7]);
std::thread t1(lambda_1, argv_orb, pangolin_window);
bool is_terminated = false;
while (!is_terminated) {
// g_main_iteration(false);
g_main_context_iteration(NULL, false);
}
t1.join();
#else
bool is_terminated = false;
while (!is_terminated) {
g_main_context_iteration(NULL, false);
}
#endif // MY_GST_USE_OPENCV
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}