// NDI stereo-image sender: streams a KITTI-style image sequence as NDI video.
#include <algorithm>
#include <chrono>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <Processing.NDI.Lib.h>
#include <opencv2/opencv.hpp>

#ifdef _WIN32
#ifdef _WIN64
#pragma comment(lib, "Processing.NDI.Lib.x64.lib")
#else // _WIN64
#pragma comment(lib, "Processing.NDI.Lib.x86.lib")
#endif // _WIN64
#endif // _WIN32

// Build the file lists for a KITTI-style stereo sequence.
//
// Reads <strPathToSequence>/times.txt (one timestamp in seconds per line)
// and fills:
//   vstrImageLeft  - paths "<seq>/image_0/NNNNNN.png" (left camera)
//   vstrImageRight - paths "<seq>/image_1/NNNNNN.png" (right camera)
//   vTimestamps    - one entry per image pair
// On failure to open times.txt the vectors are left untouched.
void LoadImages(const std::string &strPathToSequence, std::vector<std::string> &vstrImageLeft,
                std::vector<std::string> &vstrImageRight, std::vector<double> &vTimestamps)
{
    std::ifstream fTimes(strPathToSequence + "/times.txt");
    if (!fTimes.is_open())
    {
        // Without this check the old eof()-driven loop could spin forever on
        // a stream whose failbit (not eofbit) is set.
        std::cerr << "LoadImages: could not open " << strPathToSequence << "/times.txt" << std::endl;
        return;
    }

    // Test the stream itself rather than eof(): this stops cleanly on the
    // last line and never processes a failed read.
    std::string line;
    while (std::getline(fTimes, line))
    {
        if (line.empty())
            continue;
        std::istringstream iss(line);
        double t;
        if (iss >> t) // only keep lines that actually parse as a number
            vTimestamps.push_back(t);
    }

    const std::string strPrefixLeft = strPathToSequence + "/image_0/";
    const std::string strPrefixRight = strPathToSequence + "/image_1/";

    const int nTimes = static_cast<int>(vTimestamps.size());
    vstrImageLeft.resize(nTimes);
    vstrImageRight.resize(nTimes);

    for (int i = 0; i < nTimes; i++)
    {
        // KITTI file names are zero-padded to six digits: 000000.png, ...
        std::ostringstream ss;
        ss << std::setfill('0') << std::setw(6) << i;
        vstrImageLeft[i] = strPrefixLeft + ss.str() + ".png";
        vstrImageRight[i] = strPrefixRight + ss.str() + ".png";
    }
    std::cout << "Images are loaded!" << std::endl;
}
int main(int argc, char* argv[])
|
|
{ // Not required, but "correct" (see the SDK documentation.
|
|
if (!NDIlib_initialize()) return 0;
|
|
|
|
// Retrieve paths to images
|
|
std::vector<std::string> vstrImageLeft;
|
|
std::vector<std::string> vstrImageRight;
|
|
std::vector<double> vTimestamps;
|
|
LoadImages(std::string(argv[1]), vstrImageLeft, vstrImageRight, vTimestamps);
|
|
|
|
// We create the NDI sender
|
|
NDIlib_send_instance_t pNDI_send = NDIlib_send_create();
|
|
if (!pNDI_send) return 0;
|
|
|
|
// We are going to create a 1920x1080 interlaced frame at 29.97Hz.
|
|
NDIlib_video_frame_v2_t NDI_video_frame;
|
|
NDI_video_frame.xres = 1226;
|
|
NDI_video_frame.yres = 370;
|
|
NDI_video_frame.frame_rate_N = 30000;
|
|
NDI_video_frame.frame_rate_D = 1001;
|
|
NDI_video_frame.picture_aspect_ratio = 0.0;
|
|
NDI_video_frame.timecode = 0;
|
|
// What is interleaved?
|
|
NDI_video_frame.frame_format_type = NDIlib_frame_format_type_interleaved;
|
|
NDI_video_frame.FourCC = NDIlib_FourCC_type_BGRX;
|
|
NDI_video_frame.line_stride_in_bytes = NDI_video_frame.xres * 3;
|
|
NDI_video_frame.p_data = (uint8_t*)malloc(NDI_video_frame.xres*NDI_video_frame.yres * 3);
|
|
NDI_video_frame.p_metadata = "<Hello/>";
|
|
|
|
// std::cout << "The size of char: " << sizeof(uchar) << " The size of uint8_t: " << sizeof(uint8_t) << std::endl;
|
|
|
|
// Run for one minute
|
|
using namespace std::chrono;
|
|
for (const auto start = high_resolution_clock::now(); high_resolution_clock::now() - start < minutes(5);)
|
|
{ // Get the current time
|
|
const auto start_send = high_resolution_clock::now();
|
|
|
|
// Send 200 frames
|
|
for (int idx = 200; idx; idx--)
|
|
{
|
|
// Prev version
|
|
// cv::Mat randImg(600, 600, CV_8UC4);
|
|
// cv::randu(randImg, cv::Scalar(0, 0, 0, 255), cv::Scalar(255, 255, 255, 255));
|
|
// for (int i = 0; i < 200; i++){
|
|
// for (int j = 0; j < 200; j++){
|
|
// cv::Vec4b value(0, 0, 255, 255);
|
|
// randImg.at<cv::Vec4b>(i, j) = value;
|
|
// }
|
|
// }
|
|
// for (int i = 200; i < 400; i++){
|
|
// for (int j = 200; j < 400; j++){
|
|
// cv::Vec4b value(0, 255, 0, 255);
|
|
// randImg.at<cv::Vec4b>(i, j) = value;
|
|
// }
|
|
// }
|
|
// for (int i = 400; i < 600; i++){
|
|
// for (int j = 400; j < 600; j++){
|
|
// cv::Vec4b value(255, 0, 0, 255);
|
|
// randImg.at<cv::Vec4b>(i, j) = value;
|
|
// }
|
|
// }
|
|
// cv::imshow("gen img", randImg);
|
|
// char c = (char)cv::waitKey(0);
|
|
// if (c==27){
|
|
// break;
|
|
// }
|
|
// cv::Mat imLeft(randImg.rows, randImg.cols, CV_8UC4, randImg.data, cv::Mat::AUTO_STEP);
|
|
|
|
// New version
|
|
cv::Mat imLeft = cv::imread(vstrImageLeft[idx], cv::IMREAD_COLOR);
|
|
std::printf("imLeft is loaded! Width: %d, Height: %d \n", imLeft.cols, imLeft.rows);
|
|
cv::Mat imRight = cv::imread(vstrImageRight[idx], cv::IMREAD_COLOR);
|
|
// cv::cvtColor(imLeft, imLeft, cv::COLOR_BGR2BGRA, 4);
|
|
if (imLeft.isContinuous()){
|
|
std::cout << "The imLeft is Continuous." << std::endl;
|
|
}
|
|
cv::imshow("sending img", imLeft);
|
|
char c = (char)cv::waitKey(0);
|
|
// if (c==27){
|
|
// break;
|
|
// }
|
|
// cv::cvtColor(imRight, imRight, cv::COLOR_GRAY2BGRA);
|
|
cv::Mat frame(imLeft.rows, imLeft.cols*2, CV_8UC3);
|
|
std::printf("Frame width: %d, height: %d \n", frame.cols, frame.rows);
|
|
cv::Rect leftROI(0, 0, imLeft.cols, imLeft.rows);
|
|
cv::Rect rightROI(imLeft.cols, 0, imLeft.cols, imLeft.rows);
|
|
std::printf("Cropped frame width: %d, height %d \n", frame(leftROI).cols, frame(leftROI).rows);
|
|
std::printf("RightROI width: %d, height %d \n", rightROI.width, rightROI.height);
|
|
imLeft.copyTo(frame(leftROI));
|
|
imRight.copyTo(frame(rightROI));
|
|
|
|
std::cout << std::endl;
|
|
for (int i = 0; i < 100; i++){
|
|
std::cout << imLeft.at<cv::Vec3b>(i, 0) << " ";
|
|
}
|
|
std::cout << std::endl;
|
|
|
|
std::cout << "From the array first elements of the rows: " << std::endl;
|
|
for (int i = 0; i < 100; i++){
|
|
std::cout << "[ " << (int)imLeft.data[i*imLeft.cols*3] << ", ";
|
|
std::cout << (int)imLeft.data[i*imLeft.cols*3 + 1] << ", ";
|
|
std::cout << (int)imLeft.data[i*imLeft.cols*3 + 2] << ", ";
|
|
// std::cout << (int)imLeft.data[i*imLeft.cols*4 + 3] << " ] ";
|
|
}
|
|
std::cout << std::endl;
|
|
|
|
// Fill in the buffer. It is likely that you would do something much smarter than this.
|
|
// memset((void*)NDI_video_frame.p_data, (idx & 1) ? 255 : 0, NDI_video_frame.xres*NDI_video_frame.yres * 4 * sizeof(uint8_t));
|
|
// Why used void* the guy in GitHub?
|
|
memcpy((void*)NDI_video_frame.p_data, imLeft.ptr(0, 0), NDI_video_frame.xres * NDI_video_frame.yres * 3);
|
|
|
|
std::cout << std::endl << "From the video_frame.p_data array first elements of the rows: " << std::endl;
|
|
for (int i = 0; i < 100; i++){
|
|
std::cout << "[ " << (int)NDI_video_frame.p_data[i*imLeft.cols*3] << ", ";
|
|
std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*3 + 1] << ", ";
|
|
std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*3 + 2] << ", ";
|
|
// std::cout << (int)NDI_video_frame.p_data[i*imLeft.cols*4 + 3] << " ] ";
|
|
}
|
|
std::cout << std::endl;
|
|
|
|
// We now submit the frame. Note that this call will be clocked so that we end up submitting at exactly 29.97fps.
|
|
NDIlib_send_send_video_v2(pNDI_send, &NDI_video_frame);
|
|
|
|
}
|
|
|
|
// Just display something helpful
|
|
printf("200 frames sent, at %1.2ffps\n", 200.0f / duration_cast<duration<float>>(high_resolution_clock::now() - start_send).count());
|
|
}
|
|
|
|
// Free the video frame
|
|
free(NDI_video_frame.p_data);
|
|
|
|
// Destroy the NDI sender
|
|
NDIlib_send_destroy(pNDI_send);
|
|
|
|
// Not required, but nice
|
|
NDIlib_destroy();
|
|
|
|
// Success
|
|
return 0;
|
|
}