// File: cv_networking_pipeline/gst_get_ndi.cpp
// Commit: 682ca7160b "add" — PodmogilnyjIvan, 2021-12-13 04:10:22 -08:00
// 835 lines, 35 KiB, C++.
// NOTE: this file intentionally contains non-ASCII (Cyrillic) characters in
// comments and user-visible option descriptions; the hosting UI's
// "ambiguous Unicode characters" warning can be safely ignored.
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/video/gstvideometa.h>

#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <deque>
#include <format>
#include <fstream>
#include <iostream>
#include <mutex>
#include <sstream>
#include <string>
#include <thread>
#include <utility>
#include <vector>

#include "osc/OscOutboundPacketStream.h"
#include "ip/UdpSocket.h"
#include "System.h"
// CHANGE HERE TO MAKE PORT
#define ADDRESS "127.0.0.1"
#define PORT 7000
#define OUTPUT_BUFFER_SIZE 1024
#define MY_GST_USE_OPENCV
#ifdef MY_GST_USE_OPENCV
#include "opencv2/opencv.hpp"
// TODO: use synchronized deque
std::mutex g_mutex;
std::deque<cv::Mat> frameQueue;
#endif // MY_GST_USE_OPENCV
cv::Mat& extract_rot(cv::Mat& rot, const cv::Mat& trans) {
    // Copy the upper-left 3x3 (rotation) block of `trans` into `rot` and
    // return `rot` for chaining. Both matrices are accessed as float
    // (CV_32F) elements — assumed by the call sites; TODO confirm.
    for (int r = 0; r < 3; ++r) {
        const float* src = trans.ptr<float>(r);
        float* dst = rot.ptr<float>(r);
        for (int c = 0; c < 3; ++c) {
            dst[c] = src[c];
        }
    }
    return rot;
}
pair<float, vector<float>> extract_deg(const cv::Mat& rotation_matrix) {
    // TODO: extract_deg is producing negative values. Fix it.
    // Rotation angle recovered from the trace (in radians, despite the
    // variable name): theta = acos((tr(R) - 1) / 2).
    const float trace = rotation_matrix.at<float>(0, 0)
        + rotation_matrix.at<float>(1, 1)
        + rotation_matrix.at<float>(2, 2);
    const float degrees = acos((trace - 1) / 2);
    // Unnormalized axis direction taken from the skew-symmetric part of the
    // matrix: (R12 - R21, R20 - R02, R01 - R10).
    vector<float> myvec = {
        rotation_matrix.at<float>(1, 2) - rotation_matrix.at<float>(2, 1),
        rotation_matrix.at<float>(2, 0) - rotation_matrix.at<float>(0, 2),
        rotation_matrix.at<float>(0, 1) - rotation_matrix.at<float>(1, 0)
    };
    return { degrees, myvec };
}
std::vector<float> find_mode(const std::vector<std::vector<float>>& vec_of_rot_axes) {
    // Return the mode (the most frequently occurring element, compared by
    // exact equality) of the given list of rotation-axis vectors. Ties keep
    // the earliest element; an empty input yields an empty vector.
    //
    // BUG FIX: the original never reset `counter` between outer iterations,
    // so the running total only ever grew and the "max" was updated on every
    // step — the function returned the LAST element, not the mode. It also
    // indexed element 0 of an empty input (UB). The per-element debug
    // logging was dropped as well.
    if (vec_of_rot_axes.empty()) {
        return {};
    }
    std::size_t best_index = 0;
    std::size_t best_count = 0;
    for (std::size_t i = 0; i < vec_of_rot_axes.size(); ++i) {
        std::size_t count = 0;  // occurrences of element i — reset per candidate
        for (const auto& candidate : vec_of_rot_axes) {
            if (vec_of_rot_axes[i] == candidate) {
                ++count;
            }
        }
        if (count > best_count) {
            best_index = i;
            best_count = count;
        }
    }
    return vec_of_rot_axes[best_index];
}
// appsink "new-preroll" callback: fires when the pipeline prerolls (a buffer
// becomes available in PAUSED). We only log the event; the preroll sample
// itself is not pulled here.
GstFlowReturn new_preroll(GstAppSink* appsink, gpointer data) {
    g_print("Got preroll!\n");
    return GST_FLOW_OK;
}
// appsink "new-sample" callback: pulls the sample, logs any metas attached to
// the buffer, and (when OpenCV support is compiled in) clones the frame into
// the shared frameQueue for the SLAM worker thread.
GstFlowReturn new_sample(GstAppSink* appsink, gpointer data) {
    // The callback is invoked from a single streaming thread, so a plain
    // static counter is sufficient here.
    static int framecount = 0;
    framecount++;
    GstSample* sample = gst_app_sink_pull_sample(appsink);
    if (!sample) {
        // pull_sample returns NULL when the sink is flushing / at EOS.
        return GST_FLOW_OK;
    }
    GstCaps* caps = gst_sample_get_caps(sample);
    GstBuffer* buffer = gst_sample_get_buffer(sample);
    // Meta API names we merely announce when seen on a buffer.
    // BUG FIX: the original ~40-branch if/else chain contained a duplicated
    // (unreachable) GstRTPSourceMeta branch and nine dead "XXX" placeholder
    // branches; a table lookup replaces the whole chain.
    static const char* const kKnownMetaNames[] = {
        "GstNdiSrcMeta", "GstNdiSinkAudioMeta", "GstVideoMeta",
        "GstVideoCropMeta", "GstFramePositionerMeta", "GstMetaDfbSurface",
        "GstSubtitleMeta", "GstRtmpMeta", "GstMpegVideoMeta",
        "GstSctpReceiveMeta", "GstSctpSendMeta", "GstCoreMediaMeta",
        "GstCoreVideoMeta", "GstAudioDownmixMeta", "GstAudioClippingMeta",
        "GstGLSyncMeta", "GstRTPSourceMeta", "GstVideoGLTextureUploadMeta",
        "GstVideoRegionOfInterestMeta", "GstVideoAFDMeta", "GstVideoBarMeta",
        "GstVideoMultiviewMeta", "GstVideoOverlayCompositionMeta",
        "GstMetaXImage", "GstProtectionMeta", "GstNetControlMessageMeta",
        "GstMetaTest", "GstNVMMParentMeta", "GstAudioMeta",
        "GstAudioLevelMeta", "GstVideoAffineTransformationMeta",
        "GstVideoCodecAlphaMeta",
    };
    GstMeta* gst_meta;
    gpointer state = nullptr;
    while ((gst_meta = gst_buffer_iterate_meta(buffer, &state))) {
        if (gst_meta->info == gst_video_caption_meta_get_info()) {
            auto specific_meta = (GstVideoCaptionMeta*)gst_meta;
            auto x = (const char*)(specific_meta->data);
            std::cout << "MetaInfo is recognized to be [GstVideoCaptionMeta]"
                << "caption = " << std::string(x, specific_meta->size)
                << std::endl;
        }
        else if (gst_meta->info == gst_video_time_code_meta_get_info()) {
            auto specific_meta = (GstVideoTimeCodeMeta*)gst_meta;
            std::cout << "MetaInfo is recognized to be [GstVideoTimeCodeMeta]"
                << " h = " << specific_meta->tc.hours
                << " m = " << specific_meta->tc.minutes
                << " s = " << specific_meta->tc.seconds
                << " f = " << specific_meta->tc.frames
                << std::endl;
        }
        else {
            bool recognized = false;
            for (const char* name : kKnownMetaNames) {
                // gst_meta_get_info returns NULL for unregistered names, which
                // can never equal a live meta's info pointer — safe to compare.
                if (gst_meta->info == gst_meta_get_info(name)) {
                    std::cout << "MetaInfo is recognized to be [" << name << "]"
                        << std::endl;
                    recognized = true;
                    break;
                }
            }
            if (!recognized) {
                std::cout << "GstMetaInfo is not recognized."
                    << " info = " << gst_meta->info
                    << " api = " << gst_meta->info->api
                    << std::endl;
            }
        }
    }
    // ---- Read frame and convert to opencv format ---------------
    GstMapInfo map;
    // BUG FIX: the map result was previously assumed to succeed.
    if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
#ifdef MY_GST_USE_OPENCV
        // Resolve frame dimensions from the caps; fall back to defaults when
        // the fields are absent.
        int width = 480;
        int height = 270;
        GstStructure* s = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(s, "width", &width);
        gst_structure_get_int(s, "height", &height);
        // Wrap the mapped memory without copying, then clone so the queued
        // frame owns its pixels after the buffer is unmapped.
        cv::Mat frame(cv::Size(width, height), CV_8UC4, (char*)map.data, cv::Mat::AUTO_STEP);
        {
            std::lock_guard<std::mutex> guard(g_mutex);
            frameQueue.push_back(frame.clone());
        }
#endif
        gst_buffer_unmap(buffer, &map);
    }
    // ------------------------------------------------------------
    // print dot every 30 frames
    if (framecount % 30 == 0) {
        g_print(".");
    }
    // show caps on first frame
    if (framecount == 1) {
        // BUG FIX: gst_caps_to_string allocates a string the caller must free;
        // the original leaked it.
        gchar* caps_str = gst_caps_to_string(caps);
        g_print("%s\n", caps_str);
        g_free(caps_str);
    }
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}
// Bus watch installed in main(): logs pipeline errors and ignores everything
// else. Returning TRUE keeps the watch alive so we are notified again the
// next time a message appears on the bus (FALSE would remove the watch).
static gboolean my_bus_callback(GstBus* bus, GstMessage* message, gpointer data) {
    const GstMessageType msg_type = GST_MESSAGE_TYPE(message);
    if (msg_type == GST_MESSAGE_ERROR) {
        GError* err = nullptr;
        gchar* debug_info = nullptr;
        gst_message_parse_error(message, &err, &debug_info);
        g_print("Error: %s\n", err->message);
        g_error_free(err);
        g_free(debug_info);
    }
    else if (msg_type == GST_MESSAGE_EOS) {
        /* end-of-stream: nothing to do */
    }
    /* all other message types are deliberately unhandled */
    return TRUE;
}
// Command-line option state, filled in by g_option_context_parse() in main().
static gint repeats = 2;          // -r/--repeats: repetition count (not read elsewhere in this file)
static gchar* ndi_name = nullptr; // -n/--ndi-name: NDI source name used to build the pipeline
static gint use_gui = 0;          // -g/--gui: open an OpenCV preview window in the SLAM thread
static gboolean beep = FALSE;     // -b/--beep: flag parsed but not read elsewhere in this file
// GOption table mapping the flags above to their storage. The description
// strings (some in Russian) are user-visible --help text and must stay as-is.
static GOptionEntry entries[] =
{
    { "repeats", 'r', 0, G_OPTION_ARG_INT, &repeats, "Среднее число повторений N", "N" },
    { "ndi-name", 'n', 0, G_OPTION_ARG_STRING, &ndi_name, "you can enter the string here (ndi-name)", "M" },
    { "gui", 'g', 0, G_OPTION_ARG_INT, &use_gui, "use gui", nullptr },
    { "beep", 'b', 0, G_OPTION_ARG_NONE, &beep, "Сигнал при выполнениии", NULL },
    { NULL }
};
int main(int argc, char* argv[]) {
//if (argc != 4)
//{
// cerr << endl << "Usage: ./mono_video path_to_vocabulary path_to_settings source_ndi" << endl;
// return 1;
//}
// INTPUT PARAMETERS: PATH_TO_EXE path_to_vocabulary path_to_settings source_ndi
// TODO: DON'T FORGET TO CHANGE THE run_gst_loopthrough_capture.cmd SCRIPT
std::cout << "argc = " << argc << std::endl;
GError* error = nullptr;
GOptionContext* context;
context = g_option_context_new("- test tree model performance");
g_option_context_add_main_entries(context, entries, "bla");
char** argv_gst;
argv_gst = new char* [2];
argv_gst[0] = new char[200];
argv_gst[1] = new char[200];
strcpy(argv_gst[0], argv[0]);
strcpy(argv_gst[1], argv[3]);
// QUESTION 1.
g_option_context_parse(context, &argc - 2, &argv_gst, &error);
g_option_context_parse(context, &argc, &argv, &error);
if (!ndi_name) {
std::cout << "ndi-name is not provided" << std::endl;
ndi_name = (char*)malloc(sizeof(char) * 100);
// ndi_name = "DESKTOP - O5PNOBN(Test Pattern)";
std::cout << "ndi-name (default) = '" << ndi_name << "'" << std::endl;
}
else {
std::cout << "ndi-name = '" << ndi_name << "'" << std::endl;
}
GstStateChangeReturn ret;
int fake_argc = 1;
gst_init(&fake_argc, &argv);
std::stringstream ss;
ss << "ndisrc ndi-name=\"" << ndi_name << "\" ! ndisrcdemux name=demux "
<< "demux.video ! queue ! tee name=my_tee "
<< "my_tee. ! queue ! videoconvert ! autovideosink "
<< "my_tee. ! queue ! videoconvert ! appsink name=my_sink";
std::string my_pipeline = ss.str();
GstElement* pipeline = gst_parse_launch(my_pipeline.c_str(), nullptr);
/* get sink */
GstElement* sink = gst_bin_get_by_name(GST_BIN(pipeline), "my_sink");
gst_app_sink_set_emit_signals((GstAppSink*)sink, true);
gst_app_sink_set_drop((GstAppSink*)sink, true);
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample };
gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, NULL, NULL);
GstBus* bus;
guint bus_watch_id;
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
bus_watch_id = gst_bus_add_watch(bus, my_bus_callback, NULL);
gst_object_unref(bus);
/* Start playing */
ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(pipeline);
return -1;
}
#ifdef MY_GST_USE_OPENCV
auto lambda_1 = [](char** argv) {
// --------------------------------- SLAM SYSTEM VARIABLES ---------------------------------
// Create SLAM system. It initializes all system threads and gets ready to process frames.
ORB_SLAM3::System SLAM(argv[1], argv[2], ORB_SLAM3::System::MONOCULAR, false);
std::printf("SLAM system initialized\n");
// Main loop
cv::Mat frame;
int cnt = 0;
const double time_step = 1.0;
double ts = 0;
char matrix_name[100];
vector<float> vec_of_deg, values;
vector<vector<float>> vec_of_rot_axis;
// ---- INITIALIZE FOR THE PROCESSING OF AXIS LOSS AND FOR THE AXIS VECTOR INFORMATION ----
float skew1 = 0.0;
float DIFF_TO_CENTER = 0.0;
float curr_deg; // later I'll assign the exact value
vector<float> curr_vec;
vector<float> mode1, mode2;
vector<vector<float>> accum, accum2;
int counter2, j = 0;
std::cout << "J is: " << j;
vector<float> mode_vec, mode_vec2; // 2 вектора, для аккумуляции слева и справа
// zero_flag - индикатор, что текущий элемент пошёл в обратную сторону (около нуля)
// mirror_flag - значения на данный момент должны отражаться
bool zero_flag, mirror_flag = false;
float mirror_point = 0.0;
// --------------------------------- SLAM SYSTEM VARIABLES ---------------------------------
// Let's do two steps outside the loop.
for (int i = 1; i <= 2; i++) {
if (use_gui) {
cv::namedWindow("preview", 1);
}
else {
// cv::namedWindow("no preview", 1);
}
cv::Mat frame;
char* buffer = nullptr;
// EXTRACTING FRAME HERE.
{
std::lock_guard<std::mutex> guard(g_mutex);
if (frameQueue.size() > 0) {
frame = frameQueue.front();
frameQueue.pop_front();
std::cout << "we have a frame to process..." << std::endl;
}
}
cv::Mat Tcw = SLAM.TrackMonocular(frame, ts, vector<ORB_SLAM3::IMU::Point>(), "");
cv::Mat Rot(3, 3, CV_32F, 0.0);
std::cout << Tcw << std::endl;
if (!Tcw.empty()) {
sprintf(matrix_name, "matrix%d", cnt);
extract_rot(Rot, Tcw);
// cout << "Extracted rotation matrix is: " << Rot;
auto deg_vec = extract_deg(Rot);
// QUESTION 2.
curr_deg = -deg_vec.first * 57.29;
// TODO: Invert curr_vec too. (put the minus sign to each element). (You can define the - operator fot the vector).
curr_vec = deg_vec.second;
cout << "Successfully created curr_deg and curr_vec" << endl;
// LET'S DEFINE CONSTANT TO ZERO OUT THE START
if (i == 1) {
DIFF_TO_CENTER = 0.0;
}
vec_of_deg.push_back(curr_deg - DIFF_TO_CENTER);
vec_of_rot_axis.push_back(curr_vec);
values.push_back(curr_deg - DIFF_TO_CENTER);
cout << "Successfully pushed to the vectors " << endl;
//cout << curr_deg - DIFF_TO_CENTER << " " << curr_vec[0] << " " << curr_vec[1] << " " << curr_vec[2] << endl;
// SEND THE RESULT THROUGH OSC
//outfile << curr_deg - DIFF_TO_CENTER << " " << curr_vec[0] << " " << curr_vec[1] << " " << curr_vec[2] << endl;
cout << "Successfully written to the file" << endl;
j++;
}
cnt++;
ts += time_step;
}
while (true) {
cv::Mat frame;
char* buffer = nullptr;
{
std::lock_guard<std::mutex> guard(g_mutex);
if (frameQueue.size() > 0) {
frame = frameQueue.front();
frameQueue.pop_front();
std::cout << "we have a frame to process..." << std::endl;
if (!frame.empty()) {
cv::Mat Tcw = SLAM.TrackMonocular(frame, ts, vector<ORB_SLAM3::IMU::Point>(), "");
cv::Mat Rot(3, 3, CV_32F, 0.0);
std::cout << Tcw << std::endl;
if (!Tcw.empty()) {
sprintf(matrix_name, "matrix%d", cnt);
extract_rot(Rot, Tcw);
// cout << "Extracted rotation matrix is: " << Rot;
// Extract the degree and the vector from the rotation matrix.
auto deg_vec = extract_deg(Rot); // returns a degree and a vector of rotation.
float new_deg = -deg_vec.first * 57.29 - DIFF_TO_CENTER;
vector<float> new_vec = deg_vec.second;
cout << "Successfully created curr_deg and curr_vec" << endl;
vec_of_deg.push_back(new_deg);
vec_of_rot_axis.push_back(new_vec);
j++;
cout << "Pushed to the vectors. Line 207" << endl;
// ---- II PART OF THE PROCESSING ----
// TODO: II PART OF PROCESSING MIRRORED FIRST CHANGE, BUT NOT THE REST.
// Если текущий градус больше epsilon = 5, то zero_flag = false
// Can cause a problem, when accumulating values after turning on the zero_flag.
// TODO: accum2 is full when the zero_flag enables, which is bad. work on that.
if (zero_flag) {
if ((vec_of_deg[j - 1] < -5 || vec_of_deg[j - 1] > 5) && accum2.size() == 5) {
zero_flag = false;
}
}
if (zero_flag) { cout << "Zero flag is: true" << endl; }
else { cout << "Zero flag is: false" << endl; }
// Если нет zero_flag, а в accum2 что-то есть, то опустошим его.
if (!(zero_flag) && !accum2.empty()) { accum2 = {}; }
// Сохраняем последние 5 значений векторов
if (!zero_flag) {
cout << "Line 211 ok..." << endl;
if (accum.size() == 5) {
cout << "Accum size = 5." << endl;
accum.erase(accum.begin());
cout << "Line 215 ok..." << endl;
accum.push_back(vec_of_rot_axis[j - 1]);
cout << "Line 217 ok..." << endl;
}
else {
cout << "Accum size != 5." << endl;
cout << "j is: " << j << " len of vec_of_rot_axis is: " << vec_of_rot_axis.size() << endl;
accum.push_back(vec_of_rot_axis[j - 1]);
cout << "Line 223 ok..." << endl;
}
}
// Найдем элемент, который начал расти, а не убывать около нуля
if (!zero_flag) {
if (vec_of_deg[j - 1] > -5 && vec_of_deg[j - 1] < 5) {
// Если нынешний элемент уже не меньше предыдущего, а предыдущая разность тоже около нуля, при этом абсолютная разность между градусами больше, чем 0.01
if (abs(vec_of_deg[j - 1]) >= abs(vec_of_deg[j - 2]) && (abs(vec_of_deg[j - 2] - vec_of_deg[j - 3]) < 10) && (abs(vec_of_deg[j - 1] - vec_of_deg[j - 2]) > .3)) {
zero_flag = true;
cout << "Line 233 and 232 ok..." << endl;
}
// else {
// zero_flag = false;
// }
}
}
cout << "Accum size is: " << accum.size() << endl;
cout << "Accum2 size is: " << accum2.size() << endl;
if (zero_flag) {
// Если набрали 5 элементов
cout << "Entered in zero_flag if..." << endl;
cout << "Accum2.size() is: " << accum2.size() << endl;
if (accum2.size() == 5 && accum.size() == 5) {
// Имеем массивы векторов. Найдём их моды и сравним.
cout << "Accum size: " << accum.size() << endl;
cout << "Accum2 size: " << accum2.size() << endl;
mode1 = find_mode(accum);
mode2 = find_mode(accum2);
cout << "Line 246 and 245 ok..." << endl;
bool compar_res = mode1 == mode2;
cout << "Line 250 ok..." << endl;
// Если градусы около нуля, а значения векторов поменялись, то отражаем
// Input data leave it as it as, but the output data has to be processed.
if (!(compar_res)) {
// Если мы нашли ту самую точку, то отразим точки, которые мы накопили, и прибавим к ним точку
// отражения, а также изменим точку отражения, и изменим флаг mirror_flag = True
cout << "Нашли ту самую точку!" << endl;
// mirror_point += values[j-6];
// cout << "Mirror point after: " << mirror_point << endl;
cout << "Line 255 ok..." << endl;
if (mirror_flag) {
mirror_flag = false;
}
else {
mirror_flag = true;
}
// for (int i = j-6; i < j-1; i++){
// values[i] = -values[i] + mirror_point;
// }
// cout << "Lines 263 and 264 are ok" << "j is: " << j << endl;
}
accum2 = {};
cout << "Making zero flag false..." << endl;
zero_flag = false;
}
else {
if (accum2.size() < 5) {
accum2.push_back(vec_of_rot_axis[j - 1]);
cout << "Line 274 ok..." << endl;
}
}
}
// Сохраняем значения...
if (mirror_flag) {
; cout << "Mirror flag is on;" << " vec_of_deg size: " << vec_of_deg.size() << "; j is: " << j << endl;
values.push_back(-vec_of_deg[j - 1] + mirror_point);
// cout << "Line 281 ok..." << endl;
}
else {
; cout << "Mirror flag is off" << " vec_of_deg size: " << vec_of_deg.size() << "; j is: " << j << endl;
values.push_back(vec_of_deg[j - 1]);
// cout << "Line 284 ok..." << endl;
}
cout << "Processed value is: " << values[j - 1] << endl; cout << " " << endl;
// --------- I PART OF THE PROCESSING ---------
// values[j-1] += skew1;
// float diff = (values[j-2] - values[j-1]);
// cout << "New deg is: " << new_deg << "Diff is: " << diff << endl;
//
//
// // Если разница больше 10, то скорее всего произошла потеря.
// if (abs(diff) > 10) {
// cout << "Diff is more than 10; Correcting... " << endl;
// values[j-1] += diff;
// skew1 += diff;
// }
// --------- I PART OF THE PROCESSING ---------
// Запись в файл.
//outfile << values[j - 1] << " " << new_vec[0] << " " << new_vec[1] << " " << new_vec[2] << " " << cnt << endl;
// cout << "Successfully written to the file" << endl;
// Выполнить отправку в протокол OSC.
//cv::Vec3d res(1., 1., 1.);
//std::cout << "defined Vector is: " << res[0] << res[1] << res[2] << std::endl;
std::cout << "message received!" << std::endl;
UdpTransmitSocket transmitSocket(IpEndpointName(ADDRESS, PORT));
char buffer[OUTPUT_BUFFER_SIZE];
osc::OutboundPacketStream p(buffer, OUTPUT_BUFFER_SIZE);
std::string str;
str = std::to_string(values[j-1]) + " " + std::to_string(new_vec[0]) + " " + std::to_string(new_vec[1]) + " " + std::to_string(new_vec[2]);
char msg[40];
strcpy(msg, str.c_str());
p << osc::BeginBundleImmediate
<< osc::BeginMessage("/test3") << msg << osc::EndMessage
/* << osc::BeginMessage("/test2")
<< true << 24 << (float)10.8 << "world" << osc::EndMessage*/
<< osc::EndBundle;
//p << osc::BeginBundleImmediate
// << osc::BeginMessage("/test1")
// //res[0] << res[1] << res[2] <<
// << true << "blah" << osc::EndMessage << osc::EndBundle;
////<< osc::BeginMessage("/test2")
////<< true << 24 << (float)10.8 << "world" << osc::EndMessage
transmitSocket.Send(p.Data(), p.Size());
std::cout << "Message sent!" << std::endl;
// ---- II PART OF THE PROCESSING ----
curr_deg = new_deg;
curr_vec = new_vec;
}
cnt++;
ts += time_step;
}
}
else {
std::cout << "Don't have any frames yet ..." << std::endl;
//std::cout << "";
}
}
delete[] buffer;
}
std::printf("End of video\n");
// Stop all threads
SLAM.Shutdown();
std::printf("Done.\n");
};
std::cout << "Lambda function defined ..." << std::endl;
char** argv_orb;
argv_orb = new char* [3];
argv_orb[0] = new char[300];
argv_orb[1] = new char[300];
argv_orb[2] = new char[300];
strcpy(argv_orb[0], argv[0]);
strcpy(argv_orb[1], argv[1]);
strcpy(argv_orb[2], argv[2]);
std::thread t1(lambda_1, argv_orb);
std::cout << "Lambda function works ini the thread t1 ..." << std::endl;
bool is_terminated = false;
while (!is_terminated) {
// g_main_iteration(false);
g_main_context_iteration(NULL, false);
}
t1.join();
#else
bool is_terminated = false;
while (!is_terminated) {
g_main_context_iteration(NULL, false);
}
#endif // MY_GST_USE_OPENCV
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
gst_object_unref(GST_OBJECT(pipeline));
return 0;
}