// NDI receiver example: finds a named NDI source on the network and displays
// its video frames in an OpenCV window (frame metadata is logged to stdout).
#include <cstdio>
|
|
#include <chrono>
|
|
#include <string>
|
|
#include <iostream>
|
|
#include <csignal>
|
|
#include <Processing.NDI.Lib.h>
|
|
#include "opencv2/opencv.hpp"
|
|
#ifdef _WIN32
|
|
#ifdef _WIN64
|
|
#pragma comment(lib, "Processing.NDI.Lib.x64.lib")
|
|
#else // _WIN64
|
|
#pragma comment(lib, "Processing.NDI.Lib.x86.lib")
|
|
#endif // _WIN64
|
|
#endif // _WIN32
|
|
|
|
// Shutdown flag, set from signalHandler when SIGINT/SIGTERM arrives and
// polled by the loops in main. A plain bool is not guaranteed to be safely
// writable from a signal handler; `volatile std::sig_atomic_t` is the only
// portable type for this purpose. (The former file-scope
// `using namespace std::chrono;` was unused and has been removed; main
// declares its own.)
static volatile std::sig_atomic_t is_terminated = false;
|
|
|
|
// Request a clean shutdown.
//   Ctrl+C  -> SIGINT
//   systemd -> SIGTERM
// The handler only sets the flag: iostream (the original std::cout call) is
// not async-signal-safe, so calling it here was undefined behavior. The main
// loops observe the flag and exit on their own.
void signalHandler(int signum) {
    (void)signum;  // the reason for shutdown is not needed, only the fact
    is_terminated = true;
}
|
|
|
|
int main(int argc, char* argv[]) {
|
|
if (argc < 2) {
|
|
std::cout << "too few arguments" << std::endl;
|
|
return -1;
|
|
}
|
|
signal(SIGINT, signalHandler);
|
|
signal(SIGTERM, signalHandler);
|
|
std::string magic_string = argv[1];
|
|
std::cout << "we got the ndi-stream-name = " << magic_string << std::endl;
|
|
|
|
// Not required, but "correct" (see the SDK documentation.
|
|
if (!NDIlib_initialize()) {
|
|
return 0;
|
|
}
|
|
// We are going to create an NDI finder that locates sources on the network.
|
|
NDIlib_find_instance_t pNDI_find = NDIlib_find_create_v2();
|
|
if (!pNDI_find) {
|
|
return 0;
|
|
}
|
|
// Run for one minute
|
|
uint32_t no_sources = 0;
|
|
const NDIlib_source_t* p_sources = nullptr;
|
|
int stream_offset = -1;
|
|
while (stream_offset < 0 && !is_terminated) {
|
|
printf("Looking for sources ...\n");
|
|
NDIlib_find_wait_for_sources(pNDI_find, 5000/* One second */);
|
|
// // Wait up till 5 seconds to check for new sources to be added or removed
|
|
// if (!NDIlib_find_wait_for_sources(pNDI_find, 5000/* 5 seconds */)) {
|
|
// printf("No change to the sources found.\n");
|
|
// continue;
|
|
// }
|
|
p_sources = NDIlib_find_get_current_sources(pNDI_find, &no_sources);
|
|
// Display all the sources.
|
|
printf("Network sources (%u found).\n", no_sources);
|
|
for (uint32_t i = 0; i < no_sources; i++) {
|
|
printf("%u. %s\n", i + 1, p_sources[i].p_ndi_name);
|
|
}
|
|
for (uint32_t i = 0; i < no_sources; i++) {
|
|
const auto& the_name = std::string(p_sources[i].p_ndi_name);
|
|
if (the_name == magic_string) {
|
|
stream_offset = static_cast<int>(i);
|
|
std::cout << "selecting the stream [" << the_name << "] with the index = " << i << std::endl;
|
|
break;
|
|
} else {
|
|
std::cout << "the stream [" << the_name << "] noes not match. index = " << i << std::endl;
|
|
}
|
|
}
|
|
if (stream_offset >= 0) {
|
|
std::cout << "found the stream" << std::endl;
|
|
break;
|
|
}
|
|
std::cout << "no such stream" << std::endl;
|
|
}
|
|
|
|
// We now have at least one source, so we create a receiver to look at it.
|
|
NDIlib_recv_instance_t pNDI_recv = NDIlib_recv_create_v3();
|
|
if (!pNDI_recv) {
|
|
return 0;
|
|
}
|
|
|
|
// Connect to our sources
|
|
NDIlib_recv_connect(pNDI_recv, p_sources + stream_offset);
|
|
|
|
// Destroy the NDI finder. We needed to have access to the pointers to p_sources[0]
|
|
NDIlib_find_destroy(pNDI_find);
|
|
|
|
// Run for one minute
|
|
using namespace std::chrono;
|
|
int counter = 0;
|
|
cv::namedWindow("test2");
|
|
while (!is_terminated) {
|
|
counter += 1;
|
|
// The descriptors
|
|
NDIlib_video_frame_v2_t video_frame;
|
|
// NDIlib_audio_frame_v3_t audio_frame;
|
|
NDIlib_metadata_frame_t metadata_frame;
|
|
const auto& timeout = 5000;
|
|
|
|
const auto& ret = NDIlib_recv_capture_v3(pNDI_recv, &video_frame, nullptr, &metadata_frame, timeout);
|
|
switch (ret) {
|
|
case NDIlib_frame_type_none: { // No data
|
|
printf("No data received.\n");
|
|
break;
|
|
}
|
|
case NDIlib_frame_type_video: { // Video data
|
|
printf("%d -- Video data received (%dx%d).\n", counter, video_frame.xres, video_frame.yres);
|
|
std::cout << "timestand & timecode: " << video_frame.timestamp << " " << video_frame.timecode << std::endl;
|
|
std::cout << "The frame format type is: " << video_frame.frame_format_type << std::endl;
|
|
std::cout << "The type of data in the frame is: " << video_frame.FourCC << std::endl;
|
|
int len = 0;
|
|
if (video_frame.p_metadata) {
|
|
for (int i = 0; ; ++i) {
|
|
// std::cout << i << std::endl;
|
|
if (video_frame.p_metadata[i] == 0) {
|
|
len = i;
|
|
break;
|
|
}
|
|
}
|
|
std::cout << "metadata len = " << len << " "
|
|
<< std::string(video_frame.p_metadata)
|
|
<< std::endl;
|
|
} else {
|
|
std::cout << "video_frame.p_metadata is nullptr" << std::endl;
|
|
std::cout << "metadata len = " << len << std::endl;
|
|
}
|
|
auto p_data = video_frame.p_data;
|
|
std::cout << "The frame format type is: " << video_frame.frame_format_type << std::endl;
|
|
cv::Mat frame(video_frame.yres, video_frame.xres, CV_8UC4, p_data);
|
|
std::cout << "The frame dimensions: " << frame.cols << " " << frame.rows << std::endl;
|
|
cv::imshow("test2", frame);
|
|
cv::waitKey(0);
|
|
|
|
std::cout << "space (video frame)" << std::endl;
|
|
NDIlib_recv_free_video_v2(pNDI_recv, &video_frame);
|
|
break;
|
|
}
|
|
// case NDIlib_frame_type_audio: { // Audio data
|
|
// printf("Audio data received (%d samples).\n", audio_frame.no_samples);
|
|
// NDIlib_recv_free_audio_v3(pNDI_recv, &audio_frame);
|
|
// break;
|
|
// }
|
|
case NDIlib_frame_type_metadata: {
|
|
printf("Metadata received (%d length).\n", metadata_frame.length);
|
|
std::cout << std::string(metadata_frame.p_data) << std::endl;
|
|
std::cout << "space (metadata)" << std::endl;
|
|
NDIlib_recv_free_metadata(pNDI_recv, &metadata_frame);
|
|
break;
|
|
}
|
|
default:
|
|
printf("other case");
|
|
std::cout << ret << std::endl;
|
|
}
|
|
}
|
|
|
|
// Destroy the receiver
|
|
NDIlib_recv_destroy(pNDI_recv);
|
|
|
|
// Not required, but nice
|
|
NDIlib_destroy();
|
|
|
|
// Finished
|
|
return 0;
|
|
}
|