Files
cv_networking_pipeline_linux/NDIlib_Recv.cpp
Ivan 6fc9ff1ae3 v3
2022-06-09 16:13:32 +03:00

107 lines
4.0 KiB
C++

#include <algorithm>
#include <chrono>
#include <cstdio>
#include <iostream>

#include <Processing.NDI.Lib.h>
#include <opencv2/opencv.hpp>
#ifdef _WIN32
#ifdef _WIN64
#pragma comment(lib, "Processing.NDI.Lib.x64.lib")
#else // _WIN64
#pragma comment(lib, "Processing.NDI.Lib.x86.lib")
#endif // _WIN64
#endif // _WIN32
int main(int argc, char* argv[])
{
	// Not required, but "correct" (see the SDK documentation).
	if (!NDIlib_initialize()) return 0;

	// Create a finder to locate NDI sources on the network.
	NDIlib_find_instance_t pNDI_find = NDIlib_find_create_v2();
	if (!pNDI_find) return 0;

	// Block until at least one source is visible.
	uint32_t no_sources = 0;
	const NDIlib_source_t* p_sources = nullptr;
	while (!no_sources)
	{	// Wait until the sources on the network have changed.
		printf("Looking for sources ...\n");
		NDIlib_find_wait_for_sources(pNDI_find, 1000 /* one second */);
		p_sources = NDIlib_find_get_current_sources(pNDI_find, &no_sources);
	}

	// We now have at least one source, so we create a receiver to look at it.
	// BGRX/BGRA is requested so a frame maps directly onto an OpenCV CV_8UC4 Mat.
	NDIlib_recv_create_v3_t pNDI_recv_parameters;
	pNDI_recv_parameters.color_format = NDIlib_recv_color_format_BGRX_BGRA;
	NDIlib_recv_instance_t pNDI_recv = NDIlib_recv_create_v3(&pNDI_recv_parameters);
	if (!pNDI_recv)
	{	// Don't leak the finder on the failure path.
		NDIlib_find_destroy(pNDI_find);
		return 0;
	}

	// Connect to the first discovered source.
	NDIlib_recv_connect(pNDI_recv, p_sources + 0);

	// Destroy the NDI finder. p_sources is owned by the finder, so it must not
	// be dereferenced after this point.
	NDIlib_find_destroy(pNDI_find);

	// Run for five minutes (or until ESC is pressed in the preview window).
	// steady_clock: monotonic, the correct clock for measuring elapsed time.
	using namespace std::chrono;
	bool quit = false;
	for (const auto start = steady_clock::now();
	     !quit && steady_clock::now() - start < minutes(5);)
	{	// The frame descriptors filled in by the capture call below.
		NDIlib_video_frame_v2_t video_frame;
		NDIlib_audio_frame_v2_t audio_frame;

		switch (NDIlib_recv_capture_v2(pNDI_recv, &video_frame, &audio_frame, nullptr, 5000))
		{	// Nothing arrived within the 5 s timeout.
		case NDIlib_frame_type_none:
			printf("No data received.\n");
			break;

			// Video data
		case NDIlib_frame_type_video:
			printf("Video data received (%dx%d).\n", video_frame.xres, video_frame.yres);
			if (video_frame.p_data != nullptr)
			{
				// Rows in the NDI buffer may be padded: always step by
				// line_stride_in_bytes, never assume xres * 4.
				const int stride = video_frame.line_stride_in_bytes;

				// Debug dump: the first BGRA pixel of the leading rows,
				// clamped so small frames don't read out of bounds.
				const int rows_to_dump = std::min(video_frame.yres, 100);
				for (int i = 0; i < rows_to_dump; i++)
				{
					const uint8_t* px = video_frame.p_data + i * stride;
					std::cout << "[ " << (int)px[0] << " " << (int)px[1] << " "
					          << (int)px[2] << " " << (int)px[3] << " ] ";
				}
				std::cout << std::endl;

				// Wrap the SDK buffer without copying; pass the real row
				// stride instead of cv::Mat::AUTO_STEP so padded frames
				// are not sheared.
				cv::Mat frame(video_frame.yres, video_frame.xres, CV_8UC4,
				              video_frame.p_data, stride);
				std::cout << std::endl;

				// Cross-check: the same data viewed through the Mat
				// (first row), clamped to the actual width.
				const int cols_to_dump = std::min(video_frame.xres, 100);
				for (int i = 0; i < cols_to_dump; i++)
					std::cout << frame.at<cv::Vec4b>(0, i) << " ";
				std::cout << std::endl;

				cv::imshow("received image", frame);
				// Block until a key is pressed; ESC requests a clean exit
				// of the receive loop (a plain `break` here would only
				// leave the switch, hence the flag).
				if ((char)cv::waitKey(0) == 27)
					quit = true;
			}
			// The buffer belongs to the SDK: hand every video frame back.
			NDIlib_recv_free_video_v2(pNDI_recv, &video_frame);
			break;

			// Audio data
		case NDIlib_frame_type_audio:
			printf("Audio data received (%d samples).\n", audio_frame.no_samples);
			NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame);
			break;

		default:
			// Other frame types (status change, error) need no action here.
			break;
		}
	}

	// Destroy the receiver
	NDIlib_recv_destroy(pNDI_recv);
	// Not required, but nice
	NDIlib_destroy();
	// Finished
	return 0;
}