diff --git a/NDIlib_Recv.cpp b/NDIlib_Recv.cpp index a2a4a4b..154c150 100644 --- a/NDIlib_Recv.cpp +++ b/NDIlib_Recv.cpp @@ -30,9 +30,8 @@ int main(int argc, char* argv[]) } // We now have at least one source, so we create a receiver to look at it. - NDIlib_recv_create_v3_t pNDI_recv_parameters; - pNDI_recv_parameters.color_format = NDIlib_recv_color_format_BGRX_BGRA; - NDIlib_recv_instance_t pNDI_recv = NDIlib_recv_create_v3(&pNDI_recv_parameters); + NDIlib_recv_create_t pNDI_recv_parameters = { *p_sources, NDIlib_recv_color_format_e_BGRX_BGRA, NDIlib_recv_bandwidth_highest, true }; + NDIlib_recv_instance_t pNDI_recv = NDIlib_recv_create_v2(&pNDI_recv_parameters); if (!pNDI_recv) return 0; // Connect to our sources @@ -47,9 +46,9 @@ int main(int argc, char* argv[]) for (const auto start = high_resolution_clock::now(); high_resolution_clock::now() - start < minutes(5);) { // The descriptors NDIlib_video_frame_v2_t video_frame; - NDIlib_audio_frame_v2_t audio_frame; +// NDIlib_audio_frame_v2_t audio_frame; - switch (NDIlib_recv_capture_v2(pNDI_recv, &video_frame, &audio_frame, nullptr, 5000)) + switch (NDIlib_recv_capture_v2(pNDI_recv, &video_frame, NULL, NULL, 1000)) { // No data case NDIlib_frame_type_none: printf("No data received.\n"); @@ -60,17 +59,27 @@ int main(int argc, char* argv[]) printf("Video data received (%dx%d).\n", video_frame.xres, video_frame.yres); if (video_frame.p_data != nullptr){ // memcpy(p_data, (uchar*)video_frame.p_data, video_frame.xres * video_frame.yres * 2 * sizeof(uchar)); + // A bit of testing of the received data. 
+ std::cout << "The first 100 elements received: " << std::endl; for (int i = 0; i < 100; i++){ - std::cout << "[ " << (int)video_frame.p_data[i*video_frame.xres*4] << " "; - std::cout << (int)video_frame.p_data[i*video_frame.xres*4 + 1] << " "; - std::cout << (int)video_frame.p_data[i*video_frame.xres*4 + 2] << " "; - std::cout << (int)video_frame.p_data[i*video_frame.xres*4 + 3] << " ] "; + std::cout << (int)video_frame.p_data[i] << " "; } std::cout << std::endl; - cv::Mat frame(video_frame.yres, video_frame.xres, CV_8UC4, video_frame.p_data, cv::Mat::AUTO_STEP); + // A bit of testing of the received data. + for (int i = 0; i < 100; i++){ + std::cout << "[ " << (int)video_frame.p_data[i*video_frame.xres*3] << " "; + std::cout << (int)video_frame.p_data[i*video_frame.xres*3 + 1] << " "; + std::cout << (int)video_frame.p_data[i*video_frame.xres*3 + 2] << " ] "; +// std::cout << (int)video_frame.p_data[i*video_frame.xres*4 + 3] << " ] "; + } std::cout << std::endl; + +// cv::Mat frame(video_frame.yres, video_frame.xres, CV_8UC4, video_frame.p_data); + cv::Mat frame = cv::Mat::zeros(cv::Size(video_frame.xres, video_frame.yres), CV_8UC4); + std::cout << std::endl; + memcpy(frame.data, video_frame.p_data, video_frame.xres * video_frame.yres * 4); for (int i = 0; i < 100; i++){ std::cout << frame.at(0, i) << " "; } @@ -89,8 +98,8 @@ int main(int argc, char* argv[]) break; // Audio data case NDIlib_frame_type_audio: - printf("Audio data received (%d samples).\n", audio_frame.no_samples); - NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame); +// printf("Audio data received (%d samples).\n", audio_frame.no_samples); +// NDIlib_recv_free_audio_v2(pNDI_recv, &audio_frame); break; } } diff --git a/NDIlib_Send_Video.cpp b/NDIlib_Send_Video.cpp index 8d0c695..0d12bc2 100644 --- a/NDIlib_Send_Video.cpp +++ b/NDIlib_Send_Video.cpp @@ -71,12 +71,13 @@ int main(int argc, char* argv[]) NDI_video_frame.yres = 370; NDI_video_frame.frame_rate_N = 30000; NDI_video_frame.frame_rate_D = 
1001; - NDI_video_frame.picture_aspect_ratio = 1226.0f/360.0f; + NDI_video_frame.picture_aspect_ratio = 0.0; NDI_video_frame.timecode = 0; - NDI_video_frame.frame_format_type = NDIlib_frame_format_type_progressive; + // What is interleaved? + NDI_video_frame.frame_format_type = NDIlib_frame_format_type_interleaved; NDI_video_frame.FourCC = NDIlib_FourCC_type_BGRX; - NDI_video_frame.line_stride_in_bytes = NDI_video_frame.xres * 4; - NDI_video_frame.p_data = (uint8_t*)malloc(NDI_video_frame.xres*NDI_video_frame.yres * 4); + NDI_video_frame.line_stride_in_bytes = NDI_video_frame.xres * 3; + NDI_video_frame.p_data = (uint8_t*)malloc(NDI_video_frame.xres*NDI_video_frame.yres * 3); NDI_video_frame.p_metadata = ""; // std::cout << "The size of char: " << sizeof(uchar) << " The size of uint8_t: " << sizeof(uint8_t) << std::endl; @@ -121,8 +122,8 @@ int main(int argc, char* argv[]) // New version cv::Mat imLeft = cv::imread(vstrImageLeft[idx], cv::IMREAD_COLOR); std::printf("imLeft is loaded! Width: %d, Height: %d \n", imLeft.cols, imLeft.rows); - cv::Mat imRight = cv::imread(vstrImageRight[idx], cv::IMREAD_GRAYSCALE); - cv::cvtColor(imLeft, imLeft, cv::COLOR_BGR2BGRA, 4); + cv::Mat imRight = cv::imread(vstrImageRight[idx], cv::IMREAD_COLOR); +// cv::cvtColor(imLeft, imLeft, cv::COLOR_BGR2BGRA, 4); if (imLeft.isContinuous()){ std::cout << "The imLeft is Continuous." 
<< std::endl; } @@ -131,8 +132,8 @@ int main(int argc, char* argv[]) // if (c==27){ // break; // } - cv::cvtColor(imRight, imRight, cv::COLOR_GRAY2BGRA); - cv::Mat frame(imLeft.rows, imLeft.cols*2, CV_8UC4); +// cv::cvtColor(imRight, imRight, cv::COLOR_GRAY2BGRA); + cv::Mat frame(imLeft.rows, imLeft.cols*2, CV_8UC3); std::printf("Frame width: %d, height: %d \n", frame.cols, frame.rows); cv::Rect leftROI(0, 0, imLeft.cols, imLeft.rows); cv::Rect rightROI(imLeft.cols, 0, imLeft.cols, imLeft.rows); @@ -143,29 +144,30 @@ int main(int argc, char* argv[]) std::cout << std::endl; for (int i = 0; i < 100; i++){ - std::cout << imLeft.at(i, 0) << " "; + std::cout << imLeft.at(i, 0) << " "; } std::cout << std::endl; std::cout << "From the array first elements of the rows: " << std::endl; for (int i = 0; i < 100; i++){ - std::cout << "[ " << (int)imLeft.data[i*imLeft.cols*4] << ", "; - std::cout << (int)imLeft.data[i*imLeft.cols*4 + 1] << ", "; - std::cout << (int)imLeft.data[i*imLeft.cols*4 + 2] << ", "; - std::cout << (int)imLeft.data[i*imLeft.cols*4 + 3] << " ] "; + std::cout << "[ " << (int)imLeft.data[i*imLeft.cols*3] << ", "; + std::cout << (int)imLeft.data[i*imLeft.cols*3 + 1] << ", "; + std::cout << (int)imLeft.data[i*imLeft.cols*3 + 2] << ", "; +// std::cout << (int)imLeft.data[i*imLeft.cols*4 + 3] << " ] "; } std::cout << std::endl; // Fill in the buffer. It is likely that you would do something much smarter than this. // memset((void*)NDI_video_frame.p_data, (idx & 1) ? 255 : 0, NDI_video_frame.xres*NDI_video_frame.yres * 4 * sizeof(uint8_t)); - memcpy(NDI_video_frame.p_data, imLeft.ptr(0, 0), NDI_video_frame.xres * NDI_video_frame.yres * 4 * sizeof(char)); + // Why did the reference implementation on GitHub cast p_data to void* here? 
+ memcpy((void*)NDI_video_frame.p_data, imLeft.ptr(0, 0), NDI_video_frame.xres * NDI_video_frame.yres * 3); std::cout << std::endl << "From the video_frame.p_data array first elements of the rows: " << std::endl; for (int i = 0; i < 100; i++){ - std::cout << "[ " << (int)NDI_video_frame.p_data[i*imLeft.cols*4] << ", "; - std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*4 + 1] << ", "; - std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*4 + 2] << ", "; - std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*4 + 3] << " ] "; + std::cout << "[ " << (int)NDI_video_frame.p_data[i*imLeft.cols*3] << ", "; + std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*3 + 1] << ", "; + std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*3 + 2] << ", "; +// std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*4 + 3] << " ] "; } std::cout << std::endl; diff --git a/cmake-build-debug/NDIlib_Recv b/cmake-build-debug/NDIlib_Recv index 8bb9db5..a8426a6 100755 Binary files a/cmake-build-debug/NDIlib_Recv and b/cmake-build-debug/NDIlib_Recv differ diff --git a/cmake-build-debug/NDIlib_Send_Video b/cmake-build-debug/NDIlib_Send_Video index ea870e7..d9278db 100755 Binary files a/cmake-build-debug/NDIlib_Send_Video and b/cmake-build-debug/NDIlib_Send_Video differ