// NDI stereo sequence sender: reads a KITTI-style stereo image sequence from
// disk and streams it as a single side-by-side BGRX NDI video source.
#include <cstdio>
|
|
#include <cstdlib>
|
|
#include <cstring>
|
|
#include <chrono>
|
|
#include <iostream>
|
|
#include <csignal>
|
|
#include <sstream>
|
|
#include <fstream>
|
|
#include <vector>
|
|
#include <iomanip>
|
|
#include <Processing.NDI.Lib.h>
|
|
|
|
#include "opencv2/opencv.hpp"
|
|
|
|
using namespace std::chrono;
|
|
|
|
#ifdef _WIN32
|
|
#ifdef _WIN64
|
|
#pragma comment(lib, "Processing.NDI.Lib.x64.lib")
|
|
#else // _WIN64
|
|
#pragma comment(lib, "Processing.NDI.Lib.x86.lib")
|
|
#endif // _WIN64
|
|
#endif // _WIN32
|
|
|
|
// Shutdown flag flipped by the signal handler and polled by the main loop.
// volatile std::sig_atomic_t is the only type the standard guarantees can be
// safely written from an asynchronous signal handler (a plain bool is not).
static volatile std::sig_atomic_t is_terminated = 0;

/**
 * Requests a clean shutdown of the main send loop.
 *
 * Installed for:
 *   Ctrl+C  -> SIGINT
 *   SystemD -> SIGTERM
 *
 * @param signum Signal number delivered by the OS.
 */
void signalHandler(int signum) {
    // NOTE(review): std::cout is not async-signal-safe; tolerated here because
    // the program is about to shut down anyway, but strictly the handler
    // should only set the flag.
    std::cout << "Interrupt signal (" << signum << ") received." << std::endl;
    is_terminated = 1;
}
|
|
|
|
/**
 * Builds the filename lists and timestamps for a KITTI-style stereo sequence.
 *
 * Expected directory layout:
 *   <strPathToSequence>/times.txt   - one timestamp (seconds) per line
 *   <strPathToSequence>/image_0/    - left images  000000.png, 000001.png, ...
 *   <strPathToSequence>/image_1/    - right images 000000.png, 000001.png, ...
 *
 * @param strPathToSequence Path to the sequence root directory.
 * @param vstrImageLeft     Output: full path for every left image.
 * @param vstrImageRight    Output: full path for every right image.
 * @param vTimestamps       Output: one timestamp per frame, parsed from times.txt.
 */
void LoadImages(const std::string &strPathToSequence, std::vector<std::string> &vstrImageLeft,
                std::vector<std::string> &vstrImageRight, std::vector<double> &vTimestamps)
{
    const std::string strPathTimeFile = strPathToSequence + "/times.txt";
    std::ifstream fTimes(strPathTimeFile.c_str());
    if (!fTimes.is_open())
    {
        // Previously a missing times.txt silently produced empty output and
        // still printed the success message.
        std::cerr << "LoadImages: could not open " << strPathTimeFile << std::endl;
        return;
    }

    // Loop on getline() itself rather than !eof(): the eof-based loop
    // processes the final (failed) read as if it were a valid line.
    std::string s;
    while (std::getline(fTimes, s))
    {
        if (s.empty())
            continue;
        std::stringstream ss(s);
        double t;
        // Only keep lines that actually parse as a number; the original
        // pushed an uninitialized double when extraction failed.
        if (ss >> t)
            vTimestamps.push_back(t);
    }

    const std::string strPrefixLeft = strPathToSequence + "/image_0/";
    const std::string strPrefixRight = strPathToSequence + "/image_1/";

    const int nTimes = static_cast<int>(vTimestamps.size());
    vstrImageLeft.resize(nTimes);
    vstrImageRight.resize(nTimes);

    // KITTI filenames are zero-padded to six digits: 000000.png, 000001.png, ...
    for (int i = 0; i < nTimes; i++)
    {
        std::stringstream ss;
        ss << std::setfill('0') << std::setw(6) << i;
        vstrImageLeft[i] = strPrefixLeft + ss.str() + ".png";
        vstrImageRight[i] = strPrefixRight + ss.str() + ".png";
    }
    std::cout << "Images are loaded!" << std::endl;
}
|
|
|
|
int main(int argc, char* argv[]) {
|
|
signal(SIGINT, signalHandler);
|
|
signal(SIGTERM, signalHandler);
|
|
|
|
// Retrieve paths to images
|
|
std::vector<std::string> vstrImageLeft;
|
|
std::vector<std::string> vstrImageRight;
|
|
std::vector<double> vTimestamps;
|
|
LoadImages(std::string(argv[1]), vstrImageLeft, vstrImageRight, vTimestamps);
|
|
|
|
// Not required, but "correct" (see the SDK documentation.
|
|
if (!NDIlib_initialize()) {
|
|
return 0;
|
|
}
|
|
|
|
// We create the NDI sender
|
|
NDIlib_send_instance_t pNDI_send = NDIlib_send_create();
|
|
if (!pNDI_send) return 0;
|
|
|
|
int64_t total_frames_counter = 0;
|
|
cv::namedWindow("sourceTest");
|
|
while (!is_terminated) {
|
|
// Get the current time
|
|
const auto start_send = high_resolution_clock::now();
|
|
|
|
NDIlib_video_frame_v2_t ndi_video_frame;
|
|
ndi_video_frame.xres = 2452;
|
|
ndi_video_frame.yres = 370;
|
|
ndi_video_frame.FourCC = NDIlib_FourCC_type_BGRX;
|
|
ndi_video_frame.p_data = (uint8_t*)malloc(ndi_video_frame.xres*ndi_video_frame.yres * 4 * sizeof(uint8_t));
|
|
|
|
// Send 200 frames
|
|
for (int idx = 0; idx < (int)vstrImageLeft.size(); idx++) {
|
|
// Fill in the buffer. It is likely that you would do something much smarter than this.
|
|
cv::Mat imLeft = cv::imread(vstrImageLeft[idx], cv::IMREAD_GRAYSCALE);
|
|
std::printf("imLeft is loaded! Width: %d, Height: %d \n", imLeft.cols, imLeft.rows);
|
|
cv::Mat imRight = cv::imread(vstrImageRight[idx], cv::IMREAD_GRAYSCALE);
|
|
cv::cvtColor(imLeft, imLeft, cv::COLOR_GRAY2BGRA);
|
|
cv::cvtColor(imRight, imRight, cv::COLOR_GRAY2BGRA);
|
|
cv::Mat frame(imLeft.rows, imLeft.cols*2, CV_8UC4);
|
|
std::printf("Frame width: %d, height: %d \n", frame.cols, frame.rows);
|
|
cv::Rect leftROI(0, 0, imLeft.cols, imLeft.rows);
|
|
cv::Rect rightROI(imLeft.cols, 0, imLeft.cols, imLeft.rows);
|
|
std::printf("Cropped frame width: %d, height %d \n", frame(leftROI).cols, frame(leftROI).rows);
|
|
std::printf("RightROI width: %d, height %d \n", rightROI.width, rightROI.height);
|
|
imLeft.copyTo(frame(leftROI));
|
|
imRight.copyTo(frame(rightROI));
|
|
|
|
if (frame.type() == CV_8UC4){
|
|
std::cout << "The frame format is CV_8UC4" << std::endl;
|
|
}
|
|
else{
|
|
std::cout << "The frame format is not CV_8UC4" << std::endl;
|
|
}
|
|
std::cout << "Frame dimensions are: " << frame.cols << " " << frame.rows << " Channels: " << frame.channels() << std::endl;
|
|
cv::imshow("sourceTest", frame);
|
|
cv::waitKey(0);
|
|
|
|
uint8_t* p_data;
|
|
cv::Mat frame_clone;
|
|
if (frame.isContinuous()){
|
|
std::cout << "The frame is continuous. Proceed to copying" << std::endl;
|
|
frame_clone = frame.clone();
|
|
p_data = (uint8_t*)frame_clone.data;
|
|
}
|
|
|
|
std::cout << "Testing the value (1,2) in the frame: " << frame.at<cv::Vec4b>(1, 2) << std::endl;
|
|
std::cout << "The image contents are: " << std::endl;
|
|
for (int i = 0; i < 2; i++){
|
|
for (int j = 0; j < frame.cols; j++){
|
|
std::cout << frame.at<cv::Vec4b>(i, j) << " ";
|
|
}
|
|
std::cout << std::endl;
|
|
}
|
|
|
|
int xres = imLeft.cols*2;
|
|
std::printf("xres is: %d \n", xres);
|
|
int yres = imLeft.rows;
|
|
std::printf("yres is: %d \n", yres);
|
|
double timestamp = vTimestamps[idx];
|
|
|
|
std::string metadata_string = "<metadata_string " + std::to_string(total_frames_counter) + " timestamp: " + std::to_string(timestamp) + ">";
|
|
// NDIlib_video_frame_v2_t ndi_video_frame(
|
|
// xres, yres, NDIlib_FourCC_type_RGBX,
|
|
// 30000, 1001, (float)yres / (float)xres,
|
|
// NDIlib_frame_format_type_progressive,
|
|
// 0,
|
|
// (uint8_t*) malloc(xres * yres * 4 * sizeof(uint8_t)),
|
|
// 0,
|
|
// metadata_string.c_str()
|
|
// );
|
|
|
|
|
|
// std::cout << "hi there" << std::endl;
|
|
// std::cout << "xres = " << ndi_video_frame.xres << std::endl;
|
|
// std::cout << "yres = " << ndi_video_frame.yres << std::endl;
|
|
// std::cout << "frame_rate_N = " << ndi_video_frame.frame_rate_N << std::endl;
|
|
// std::cout << "frame_rate_D = " << ndi_video_frame.frame_rate_D << std::endl;
|
|
// std::cout << "picture_aspect_ratio = " << ndi_video_frame.picture_aspect_ratio << std::endl;
|
|
// std::cout << "frame_format_type = " << ndi_video_frame.frame_format_type << std::endl;
|
|
// std::cout << "timecode = " << ndi_video_frame.timecode << std::endl;
|
|
// std::cout << "line_stride_in_bytes = " << ndi_video_frame.line_stride_in_bytes << std::endl;
|
|
// std::cout << "p_metadata = " << ndi_video_frame.p_metadata << std::endl;
|
|
// std::cout << "timestamp = " << ndi_video_frame.timestamp << std::endl;
|
|
std::cout << "The frame format type is: " << ndi_video_frame.frame_format_type << std::endl;
|
|
|
|
//memset((void*)ndi_video_frame.p_data, , ndi_video_frame.xres * ndi_video_frame.yres * 4);
|
|
// Fill in the buffer. It is likely that you would do something much smarter than this.
|
|
// memset((void*)ndi_video_frame.p_data, p_data, ndi_video_frame.xres*ndi_video_frame.yres * 4);
|
|
// uchar numPixels = (uchar)windowWidth * (Uint32)windowHeight;
|
|
// uchar color = (uchar)0xFFCC00FF;
|
|
|
|
// uchar color = 255;
|
|
// for (int i = 0; i < yres; ++i)
|
|
// {
|
|
// for (int j = 0; j < xres; ++j){
|
|
// memcpy(ndi_video_frame.p_data + (i+j) * sizeof(uchar), p_data + (i+j) * sizeof(uchar), sizeof(color));
|
|
//
|
|
// }
|
|
// }
|
|
auto memsize = frame.cols * frame.rows * frame.channels() * sizeof(uint8_t);
|
|
uint8_t* p_data2 = frame.data;
|
|
std::cout << std::endl << p_data2[0] << std::endl;
|
|
std::cout << "The frame converted to uint8_t* array: " << std::endl;
|
|
for (int i = 0; i < 2; i++){
|
|
for (int j = 0; j < frame.cols; j++){
|
|
std::cout << (int)p_data2[i*frame.cols + j] << " ";
|
|
}
|
|
std::cout << std::endl;
|
|
}
|
|
std::cout << std::endl;
|
|
|
|
memcpy(ndi_video_frame.p_data, frame.clone().data, memsize);
|
|
// ndi_video_frame.p_data = p_data;
|
|
ndi_video_frame.timestamp = total_frames_counter;
|
|
ndi_video_frame.timecode = total_frames_counter;
|
|
|
|
// memset((void*)ndi_video_frame.p_data, (idx & 1) ? 255 : 0, ndi_video_frame.xres*ndi_video_frame.yres * 4);
|
|
|
|
// We now submit the frame. Note that this call will be clocked so that we end up submitting at exactly 29.97fps.
|
|
NDIlib_send_send_video_v2(pNDI_send, &ndi_video_frame);
|
|
std::printf("Frame is successfully sent \n");
|
|
total_frames_counter += 1;
|
|
|
|
// free((void *) ndi_video_frame.p_metadata);
|
|
// Free the video frame
|
|
// NDIlib_recv_free_video(pNDI_recv, &video_frame);
|
|
}
|
|
free(ndi_video_frame.p_data);
|
|
// Just display something helpful
|
|
printf("All frames sent, at %1.2ffps\n", (double)vstrImageLeft.size() / duration_cast<duration<float>>(high_resolution_clock::now() - start_send).count());
|
|
}
|
|
|
|
|
|
// Destroy the NDI sender
|
|
NDIlib_send_destroy(pNDI_send);
|
|
|
|
// Not required, but nice
|
|
NDIlib_destroy();
|
|
|
|
// Success
|
|
return 0;
|
|
}
|
|
|