This commit is contained in:
Ivan
2022-06-09 16:13:32 +03:00
parent 24e2a91778
commit 6fc9ff1ae3
175 changed files with 29765 additions and 8329 deletions

View File

@@ -1,4 +1,229 @@
//
// Created by ivan on 20.05.2022.
//
#include <atomic>
#include <chrono>
#include <csignal>
#include <cstdio>
#include <deque>
#include <iostream>
#include <mutex>
#include <string>
#include <thread>
#include <Processing.NDI.Lib.h>
#include "opencv2/opencv.hpp"
#ifdef _WIN32
#ifdef _WIN64
#pragma comment(lib, "Processing.NDI.Lib.x64.lib")
#else // _WIN64
#pragma comment(lib, "Processing.NDI.Lib.x86.lib")
#endif // _WIN64
#endif // _WIN32
using namespace std::chrono;

// Shutdown flag: written by the signal handler, polled by the capture loop
// in main() and by the display thread.  A plain bool is a data race here;
// std::atomic<bool> is lock-free on mainstream platforms and therefore safe
// to set from a signal handler as well.
static std::atomic<bool> is_terminated{false};

// Producer (NDI capture loop in main) pushes BGR frames; the display thread
// pops them.  Both queues are guarded by ndi_mutex.
std::deque<cv::Mat> frameQueue;
std::deque<double> timestampsQueue;
std::mutex ndi_mutex;
// Request shutdown on Ctrl+C (SIGINT) or a systemd stop (SIGTERM).
//
// Only the termination flag is touched: iostream operations are not
// async-signal-safe, so the previous std::cout logging was removed.  The
// main loop observes the flag and performs all (safe) cleanup and logging.
void signalHandler(int signum) {
    (void)signum;  // which signal arrived does not change the shutdown path
    is_terminated = true;
}
int main(int argc, char* argv[]) {
if (argc < 2) {
std::cout << "too few arguments" << std::endl;
return -1;
}
signal(SIGINT, signalHandler);
signal(SIGTERM, signalHandler);
std::string magic_string = argv[1];
std::cout << "we got the ndi-stream-name = " << magic_string << std::endl;
// Not required, but "correct" (see the SDK documentation.
if (!NDIlib_initialize()) {
return 0;
}
// We are going to create an NDI finder that locates sources on the network.
NDIlib_find_instance_t pNDI_find = NDIlib_find_create_v2();
if (!pNDI_find) {
return 0;
}
// Run for one minute
uint32_t no_sources = 0;
const NDIlib_source_t* p_sources = nullptr;
int stream_offset = -1;
while (stream_offset < 0 && !is_terminated) {
printf("Looking for sources ...\n");
NDIlib_find_wait_for_sources(pNDI_find, 5000/* One second */);
// // Wait up till 5 seconds to check for new sources to be added or removed
// if (!NDIlib_find_wait_for_sources(pNDI_find, 5000/* 5 seconds */)) {
// printf("No change to the sources found.\n");
// continue;
// }
p_sources = NDIlib_find_get_current_sources(pNDI_find, &no_sources);
// Display all the sources.
printf("Network sources (%u found).\n", no_sources);
for (uint32_t i = 0; i < no_sources; i++) {
printf("%u. %s\n", i + 1, p_sources[i].p_ndi_name);
}
for (uint32_t i = 0; i < no_sources; i++) {
const auto& the_name = std::string(p_sources[i].p_ndi_name);
if (the_name == magic_string) {
stream_offset = static_cast<int>(i);
std::cout << "selecting the stream [" << the_name << "] with the index = " << i << std::endl;
break;
} else {
std::cout << "the stream [" << the_name << "] noes not match. index = " << i << std::endl;
}
}
if (stream_offset >= 0) {
std::cout << "found the stream" << std::endl;
break;
}
std::cout << "no such stream" << std::endl;
}
// We now have at least one source, so we create a receiver to look at it.
// NDIlib_recv_create_v3_t pNDI_recv_parameters;
// pNDI_recv_parameters.color_format = NDIlib_recv_color_format_RGBX_RGBA;
// pNDI_recv_parameters.color_format = NDIlib_recv_color_format_fastest;
NDIlib_recv_instance_t pNDI_recv = NDIlib_recv_create_v3();
if (!pNDI_recv) {
return 0;
}
// Connect to our sources
NDIlib_recv_connect(pNDI_recv, p_sources + stream_offset);
// Destroy the NDI finder. We needed to have access to the pointers to p_sources[0]
NDIlib_find_destroy(pNDI_find);
// Run for one minute
using namespace std::chrono;
int counter = 0;
cv::namedWindow("test2");
auto lambda_1 = [] (char** argv) {
while (true) {
cv::Mat frame;
if (frameQueue.size() > 0) {
std::lock_guard<std::mutex> guard(ndi_mutex);
frame = frameQueue.front();
// auto ts = timestampsQueue.front();
frameQueue.pop_front();
// timestampsQueue.pop_front();
std::cout << "we have a frame to process..." << std::endl;
cv::imshow("test2", frame);
cv::waitKey(0);
}
}
};
std::thread t1(lambda_1, argv);
// container for the images
// auto p_data = (uint8_t*)malloc(2452 * 370 * 4 * sizeof(uint8_t));
while (!is_terminated) {
counter += 1;
// The descriptors
NDIlib_video_frame_v2_t video_frame;
// NDIlib_audio_frame_v3_t audio_frame;
NDIlib_metadata_frame_t metadata_frame;
const auto &timeout = 5000;
const auto &ret = NDIlib_recv_capture_v3(pNDI_recv, &video_frame, nullptr, &metadata_frame, timeout);
switch (ret) {
case NDIlib_frame_type_none: { // No data
printf("No data received.\n");
break;
}
case NDIlib_frame_type_video: { // Video data
printf("%d -- Video data received (%dx%d).\n", counter, video_frame.xres, video_frame.yres);
std::cout << "timestand & timecode: " << video_frame.timestamp << " " << video_frame.timecode
<< std::endl;
std::cout << "The frame format type is: " << video_frame.frame_format_type << std::endl;
std::cout << "The type of data in the frame is: " << video_frame.FourCC << std::endl;
int len = 0;
if (video_frame.p_metadata) {
for (int i = 0;; ++i) {
// std::cout << i << std::endl;
if (video_frame.p_metadata[i] == 0) {
len = i;
break;
}
}
std::cout << "metadata len = " << len << " "
<< std::string(video_frame.p_metadata)
<< std::endl;
} else {
std::cout << "video_frame.p_metadata is nullptr" << std::endl;
std::cout << "metadata len = " << len << std::endl;
}
if (video_frame.p_data != nullptr){
std::cout << "The frame format type is: " << video_frame.frame_format_type << std::endl;
std::cout << std::endl << "The array obtained value of video_frame: " << std::endl;
for (int i = 0; i < 2; i++){
for (int j = 0; j < video_frame.xres; j++){
std::cout << (int)video_frame.p_data[i*video_frame.xres + j] << " ";
}
std::cout << std::endl;
}
std::cout << std::endl;
// auto memsize = video_frame.xres * video_frame.yres * 4 * sizeof(uint8_t);
// memcpy(p_data, video_frame.p_data, memsize);
cv::Mat frame(video_frame.yres, video_frame.xres, CV_8UC2, video_frame.p_data, cv::Mat::AUTO_STEP);
cv::cvtColor(frame, frame, cv::COLOR_YUV2BGR_UYVY, 0);
// memcpy(frame.data, video_frame.p_data, frame.rows * frame.cols * frame.channels() * sizeof(uint8_t));
std::cout << "The image contents are: " << std::endl;
for (int i = 0; i < 2; i++){
for (int j = 0; j < video_frame.xres; j++){
std::cout << frame.at<cv::Vec4b>(i, j) << " ";
}
std::cout << std::endl;
}
// cvtColor(frame, frame, cv::COLOR_BGRA2RGBA);
std::cout << "The frame dimensions: " << frame.cols << " " << frame.rows << std::endl;
// Lock before you append something
std::lock_guard<std::mutex> guard(ndi_mutex);
frameQueue.push_back(frame.clone());
// free(p_data);
}
// cv::imshow("test2", frame);
// cv::waitKey(0);
std::cout << "space (video frame)" << std::endl;
NDIlib_recv_free_video_v2(pNDI_recv, &video_frame);
break;
}
// case NDIlib_frame_type_audio: { // Audio data
// printf("Audio data received (%d samples).\n", audio_frame.no_samples);
// NDIlib_recv_free_audio_v3(pNDI_recv, &audio_frame);
// break;
// }
case NDIlib_frame_type_metadata: {
printf("Metadata received (%d length).\n", metadata_frame.length);
std::cout << std::string(metadata_frame.p_data) << std::endl;
std::cout << "space (metadata)" << std::endl;
NDIlib_recv_free_metadata(pNDI_recv, &metadata_frame);
break;
}
default:
printf("other case");
std::cout << ret << std::endl;
}
}
t1.join();
// Destroy the receiver
NDIlib_recv_destroy(pNDI_recv);
// Not required, but nice
NDIlib_destroy();
// Finished
return 0;
}