v0.1
This commit is contained in:
6
.gitignore
vendored
Normal file
6
.gitignore
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
build/
|
||||||
|
basic-tutorial-1.cpp
|
||||||
|
basic-tutorial-2.cpp
|
||||||
|
basic-tutorial-3.cpp
|
||||||
|
basic-tutorial-4.cpp
|
||||||
|
basic-tutorial-8.cpp
|
||||||
238
CMakeLists.txt
Normal file
238
CMakeLists.txt
Normal file
@@ -0,0 +1,238 @@
|
|||||||
|
cmake_minimum_required(VERSION 3.12)
|
||||||
|
|
||||||
|
set(THIS_PROJECT "MyGst")
|
||||||
|
project(${THIS_PROJECT})
|
||||||
|
|
||||||
|
set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Release>:Release>")
|
||||||
|
|
||||||
|
MESSAGE("Build type: " ${CMAKE_BUILD_TYPE})
|
||||||
|
|
||||||
|
set(CMAKE_CXX_STANDARD 17)
|
||||||
|
set(CMAKE_INCLUDE_CURRENT_DIR ON)
|
||||||
|
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||||
|
set(THIS_ROOT ${CMAKE_CURRENT_SOURCE_DIR})
|
||||||
|
|
||||||
|
message("THIS_ROOT = ${THIS_ROOT}")
|
||||||
|
|
||||||
|
##################### PkgConfig BEGIN
|
||||||
|
set(MY_GST_PATH $ENV{MY_GST_PATH})
|
||||||
|
if (DEFINED MY_GST_PATH)
|
||||||
|
message("Env variable MY_GST_PATH=[${MY_GST_PATH}]")
|
||||||
|
else()
|
||||||
|
message(FATAL_ERROR "Env variable MY_GST_PATH is NOT DEFINED. Falling back to defualt value")
|
||||||
|
# set(MY_GST_PATH /home/admin1/gst/bin)
|
||||||
|
# message("Now the value of of MY_NDI_SDK_PATH=[${MY_GST_PATH}]")
|
||||||
|
endif()
|
||||||
|
set(PKG_CONFIG_USE_CMAKE_PREFIX_PATH ON)
|
||||||
|
# set(ENV{PKG_CONFIG_PATH} "$ENV{PKG_CONFIG_PATH}:${MY_GST_PATH}/lib/pkgconfig")
|
||||||
|
find_package(PkgConfig REQUIRED)
|
||||||
|
if ( NOT (PKGCONFIG_FOUND))
|
||||||
|
message(FATAL_ERROR "Please Install PkgConfig: CMake will Exit")
|
||||||
|
endif()
|
||||||
|
##################### PkgConfig END
|
||||||
|
##################### OpenCV BEGIN
|
||||||
|
set(OpenCV_DIR C:/OpenCV3/build)
|
||||||
|
find_package(OpenCV 3.4.16 REQUIRED)
|
||||||
|
include_directories(${OpenCV_INCLUDE_DIRS})
|
||||||
|
##################### OpenCV END
|
||||||
|
##################### OSC BEGIN
|
||||||
|
# TODO: Build oscpack normally in the project's directory
|
||||||
|
set(OSC_DIR C:/Users/ivan/Source/Repos/oscpack_1_1_0)
|
||||||
|
set(OSC_LIB_DIR ${OSC_DIR}/build/Release)
|
||||||
|
|
||||||
|
include_directories(
|
||||||
|
${OSC_DIR}
|
||||||
|
${OSC_LIB_DIR})
|
||||||
|
|
||||||
|
IF(WIN32)
|
||||||
|
set(IpSystemTypePath ${OSC_DIR}/ip/win32)
|
||||||
|
set(LIBS ${LIBS} Ws2_32 winmm)
|
||||||
|
ELSE(WIN32)
|
||||||
|
set(IpSystemTypePath ${OSC_DIR}/ip/posix)
|
||||||
|
ENDIF(WIN32)
|
||||||
|
|
||||||
|
ADD_LIBRARY(oscpack
|
||||||
|
${OSC_DIR}/ip/IpEndpointName.h
|
||||||
|
${OSC_DIR}/ip/IpEndpointName.cpp
|
||||||
|
${OSC_DIR}/ip/NetworkingUtils.h
|
||||||
|
|
||||||
|
${IpSystemTypePath}/NetworkingUtils.cpp
|
||||||
|
|
||||||
|
${OSC_DIR}/ip/UdpSocket.h
|
||||||
|
${IpSystemTypePath}/UdpSocket.cpp
|
||||||
|
|
||||||
|
${OSC_DIR}/ip/PacketListener.h
|
||||||
|
${OSC_DIR}/ip/TimerListener.h
|
||||||
|
|
||||||
|
${OSC_DIR}/osc/OscTypes.h
|
||||||
|
${OSC_DIR}/osc/OscTypes.cpp
|
||||||
|
${OSC_DIR}/osc/OscHostEndianness.h
|
||||||
|
${OSC_DIR}/osc/OscException.h
|
||||||
|
${OSC_DIR}/osc/OscPacketListener.h
|
||||||
|
${OSC_DIR}/osc/MessageMappingOscPacketListener.h
|
||||||
|
${OSC_DIR}/osc/OscReceivedElements.h
|
||||||
|
${OSC_DIR}/osc/OscReceivedElements.cpp
|
||||||
|
${OSC_DIR}/osc/OscPrintReceivedElements.h
|
||||||
|
${OSC_DIR}/osc/OscPrintReceivedElements.cpp
|
||||||
|
${OSC_DIR}/osc/OscOutboundPacketStream.h
|
||||||
|
${OSC_DIR}/osc/OscOutboundPacketStream.cpp
|
||||||
|
)
|
||||||
|
|
||||||
|
target_link_libraries(oscpack
|
||||||
|
${OSC_LIB_DIR}/oscpack.lib)
|
||||||
|
|
||||||
|
##################### OSC END
|
||||||
|
##################### ORB_SLAM3 BEGIN
|
||||||
|
set(ORB_SLAM3_DIR C:/Users/ivan/Source/Repos/ORB_SLAM3)
|
||||||
|
set(Boost_INCLUDE_DIR C:/Users/ivan/Source/Repos/ORB-SLAM3forWindows/Thirdparty/boost_1_77_0)
|
||||||
|
set(Pangolin_DIR ${ORB_SLAM3_DIR}/Thirdparty/Pangolin/build)
|
||||||
|
set(Eigen3_DIR C:/Users/ivan/Source/Repos/ORB-SLAM3forWindows/Thirdparty/eigen/build)
|
||||||
|
|
||||||
|
find_package(Eigen3 3.1.0 REQUIRED)
|
||||||
|
find_package(Pangolin REQUIRED)
|
||||||
|
find_package(Boost)
|
||||||
|
find_package(realsense2)
|
||||||
|
|
||||||
|
include_directories(
|
||||||
|
${ORB_SLAM3_DIR}
|
||||||
|
${ORB_SLAM3_DIR}/include
|
||||||
|
${ORB_SLAM3_DIR}/include/CameraModels
|
||||||
|
${ORB_SLAM3_DIR}/Thirdparty/g2o
|
||||||
|
${EIGEN3_INCLUDE_DIR}
|
||||||
|
${Pangolin_INCLUDE_DIRS}
|
||||||
|
${Boost_INCLUDE_DIR}
|
||||||
|
${Boost_INCLUDE_DIR}/stage/lib
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
##################### ORB_SLAM3 END
|
||||||
|
##################### GStreamer BEGIN
|
||||||
|
# gstreamer-1.0
|
||||||
|
pkg_check_modules(GST REQUIRED gstreamer-1.0>=1.18.5)
|
||||||
|
include_directories("${GST_INCLUDE_DIRS}")
|
||||||
|
link_directories(BEFORE ${GST_LIBDIR})
|
||||||
|
# gstreamer-app-1.0
|
||||||
|
pkg_check_modules(GST_APP REQUIRED gstreamer-app-1.0>=1.18.5)
|
||||||
|
include_directories("${GST_APP_INCLUDE_DIRS}")
|
||||||
|
link_directories(BEFORE ${GST_APP_LIBDIR})
|
||||||
|
# gstreamer-audio-1.0
|
||||||
|
pkg_check_modules(GST_AUDIO REQUIRED gstreamer-audio-1.0>=1.18.5)
|
||||||
|
include_directories("${GST_AUDIO_INCLUDE_DIRS}")
|
||||||
|
link_directories(BEFORE ${GST_AUDIO_LIBDIR})
|
||||||
|
# gstreamer-video-1.0
|
||||||
|
pkg_check_modules(GST_VIDEO REQUIRED gstreamer-video-1.0>=1.18.5)
|
||||||
|
include_directories("${GST_VIDEO_INCLUDE_DIRS}")
|
||||||
|
link_directories(BEFORE ${GST_VIDEO_LIBDIR})
|
||||||
|
##################### GStreamer END
|
||||||
|
# dump everything
|
||||||
|
get_cmake_property(_variableNames VARIABLES)
|
||||||
|
list (SORT _variableNames)
|
||||||
|
foreach (_variableName ${_variableNames})
|
||||||
|
message(STATUS "${_variableName}=${${_variableName}}")
|
||||||
|
endforeach()
|
||||||
|
##################### NDI Begin
|
||||||
|
set(MY_NDI_SDK_PATH $ENV{NDI_SDK_DIR})
|
||||||
|
if (DEFINED MY_NDI_SDK_PATH)
|
||||||
|
message("Env variable MY_NDI_SDK_PATH=[${MY_NDI_SDK_PATH}]")
|
||||||
|
else()
|
||||||
|
message("Env variable MY_NDI_SDK_PATH is NOT DEFINED. Falling back to defualt value")
|
||||||
|
IF (WIN32)
|
||||||
|
set(MY_NDI_SDK_PATH "C:\\Program Files\\NDI\\NDI 5 SDK")
|
||||||
|
ELSE()
|
||||||
|
set(MY_NDI_SDK_PATH "/home/admin1/gst/from_source/NDI_SDK_for_Linux")
|
||||||
|
ENDIF()
|
||||||
|
message("Now the value of MY_NDI_SDK_PATH=[${MY_NDI_SDK_PATH}]")
|
||||||
|
endif()
|
||||||
|
set(MY_NDI_INCLUDE "${MY_NDI_SDK_PATH}/include")
|
||||||
|
include_directories(${MY_NDI_INCLUDE})
|
||||||
|
IF (WIN32)
|
||||||
|
add_library(MY_NDI_LIBS SHARED IMPORTED)
|
||||||
|
set_target_properties(MY_NDI_LIBS PROPERTIES
|
||||||
|
IMPORTED_LOCATION "${MY_NDI_SDK_PATH}/Bin/x64/Processing.NDI.Lib.x64.dll"
|
||||||
|
IMPORTED_IMPLIB "${MY_NDI_SDK_PATH}/Lib/x64/Processing.NDI.Lib.x64.lib"
|
||||||
|
)
|
||||||
|
ELSE()
|
||||||
|
add_library(MY_NDI_LIBS SHARED IMPORTED)
|
||||||
|
set_property(TARGET MY_NDI_LIBS
|
||||||
|
PROPERTY IMPORTED_LOCATION "${MY_NDI_SDK_PATH}/lib/x86_64-linux-gnu/libndi.so"
|
||||||
|
)
|
||||||
|
ENDIF()
|
||||||
|
|
||||||
|
##################### NDI End
|
||||||
|
##################### Debug Begin
|
||||||
|
#get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES)
|
||||||
|
#foreach(dir ${dirs})
|
||||||
|
# message(STATUS "This directory is in INCLUDE_DIRECTORIES variable='${dir}'")
|
||||||
|
#endforeach()
|
||||||
|
##################### Debug End
|
||||||
|
|
||||||
|
add_executable(basic-tutorial-1 basic-tutorial-1.cpp)
|
||||||
|
target_link_libraries(basic-tutorial-1 ${GST_LIBRARIES})
|
||||||
|
|
||||||
|
add_executable(basic-tutorial-2 basic-tutorial-2.cpp)
|
||||||
|
target_link_libraries(basic-tutorial-2 ${GST_LIBRARIES})
|
||||||
|
|
||||||
|
add_executable(basic-tutorial-3 basic-tutorial-3.cpp)
|
||||||
|
target_link_libraries(basic-tutorial-3 ${GST_LIBRARIES})
|
||||||
|
|
||||||
|
add_executable(basic-tutorial-4 basic-tutorial-4.cpp)
|
||||||
|
target_link_libraries(basic-tutorial-4 ${GST_LIBRARIES})
|
||||||
|
|
||||||
|
add_executable(basic-tutorial-8 basic-tutorial-8.cpp)
|
||||||
|
target_link_libraries(basic-tutorial-8
|
||||||
|
${GST_LIBRARIES} ${GST_AUDIO_LIBRARIES} ${GST_APP_LIBRARIES} ${GST_VIDEO_LIBRARIES}
|
||||||
|
)
|
||||||
|
|
||||||
|
add_executable(gst_ndi
|
||||||
|
gst_ndi.h gst_ndi.cpp
|
||||||
|
)
|
||||||
|
target_link_libraries(gst_ndi ${GST_LIBRARIES})
|
||||||
|
|
||||||
|
add_executable(gst_get_ndi
|
||||||
|
gst_get_ndi.cpp
|
||||||
|
)
|
||||||
|
target_link_libraries(gst_get_ndi
|
||||||
|
${GST_LIBRARIES} ${GST_APP_LIBRARIES} ${GST_AUDIO_LIBRARIES}
|
||||||
|
${OpenCV_LIBRARIES}
|
||||||
|
${GST_VIDEO_LIBRARIES}
|
||||||
|
oscpack
|
||||||
|
${LIBS}
|
||||||
|
${ORB_SLAM3_DIR}/build
|
||||||
|
|
||||||
|
#${OpenCV_LIBS}
|
||||||
|
${EIGEN3_LIBS}
|
||||||
|
${Pangolin_LIBRARIES}
|
||||||
|
${Boost_LIBS}
|
||||||
|
${ORB_SLAM3_DIR}/Thirdparty/DBoW2/lib/Release/DBoW2.lib
|
||||||
|
${ORB_SLAM3_DIR}/Thirdparty/g2o/build/Release/g2o.lib
|
||||||
|
${ORB_SLAM3_DIR}/build/Release/ORB_SLAM3.lib
|
||||||
|
-lboost_serialization
|
||||||
|
-lcrypto
|
||||||
|
)
|
||||||
|
|
||||||
|
#add_executable(try_1
|
||||||
|
# try_1.cpp
|
||||||
|
#)
|
||||||
|
#target_link_libraries(try_1
|
||||||
|
# ${GST_LIBRARIES} ${OpenCV_LIBRARIES} ${GST_APP_LIBRARIES}
|
||||||
|
#)
|
||||||
|
|
||||||
|
add_executable(my_ndi
|
||||||
|
my_ndi.cpp
|
||||||
|
)
|
||||||
|
target_include_directories(my_ndi PRIVATE ${MY_NDI_INCLUDE})
|
||||||
|
target_link_libraries(my_ndi MY_NDI_LIBS)
|
||||||
|
|
||||||
|
add_executable(my_ndi_source
|
||||||
|
my_ndi_source.cpp
|
||||||
|
)
|
||||||
|
target_include_directories(my_ndi_source PRIVATE ${MY_NDI_INCLUDE})
|
||||||
|
target_link_libraries(my_ndi_source MY_NDI_LIBS)
|
||||||
|
|
||||||
|
|
||||||
|
install(TARGETS my_ndi my_ndi_source gst_get_ndi gst_ndi
|
||||||
|
CONFIGURATIONS Release
|
||||||
|
RUNTIME DESTINATION ${THIS_ROOT}/../deploy-win32
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
2
README.md
Normal file
2
README.md
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
1. put run_gst_loopthrough_capture.bat in the folder, where the binary is located.
|
||||||
|
2. install changed Gstreamer from DriveCast and run the run_ndi_start_testvideosrc_ball to test.
|
||||||
859
gst_get_ndi.cpp
Normal file
859
gst_get_ndi.cpp
Normal file
@@ -0,0 +1,859 @@
|
|||||||
|
#include <gst/gst.h>
|
||||||
|
#include <gst/app/gstappsink.h>
|
||||||
|
#include <sstream>
|
||||||
|
#include <format>
|
||||||
|
#include <gst/video/gstvideometa.h>
|
||||||
|
#include <cstdlib>
|
||||||
|
#include <deque>
|
||||||
|
#include <iostream>
|
||||||
|
#include <mutex>
|
||||||
|
#include <fstream>
|
||||||
|
|
||||||
|
#include "osc/OscOutboundPacketStream.h"
|
||||||
|
#include "ip/UdpSocket.h"
|
||||||
|
|
||||||
|
#include "System.h"
|
||||||
|
|
||||||
|
// CHANGE HERE TO MAKE PORT
|
||||||
|
#define ADDRESS "127.0.0.1"
|
||||||
|
#define PORT 7000
|
||||||
|
|
||||||
|
#define OUTPUT_BUFFER_SIZE 1024
|
||||||
|
|
||||||
|
#define MY_GST_USE_OPENCV
|
||||||
|
|
||||||
|
#ifdef MY_GST_USE_OPENCV
|
||||||
|
#include "opencv2/opencv.hpp"
|
||||||
|
|
||||||
|
// TODO: use synchronized deque
|
||||||
|
std::mutex g_mutex;
|
||||||
|
std::deque<cv::Mat> frameQueue;
|
||||||
|
#endif // MY_GST_USE_OPENCV
|
||||||
|
|
||||||
|
cv::Mat& extract_rot(cv::Mat& rot, const cv::Mat& trans) {
|
||||||
|
// cv::Mat rot(3, 3, CV_32F, 0.0);
|
||||||
|
for (int row = 0; row < 3; ++row) {
|
||||||
|
for (int col = 0; col < 3; ++col) {
|
||||||
|
rot.at<float>(row, col) = trans.at<float>(row, col);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return rot;
|
||||||
|
}
|
||||||
|
|
||||||
|
pair<float, vector<float>> extract_deg(const cv::Mat& rotation_matrix) {
|
||||||
|
// TODO: extract_deg is producing negative values. Fix it.
|
||||||
|
float degrees;
|
||||||
|
vector<float> myvec = { rotation_matrix.at<float>(1, 2) - rotation_matrix.at<float>(2,1), rotation_matrix.at<float>(2, 0) - rotation_matrix.at<float>(0, 2), rotation_matrix.at<float>(0, 1) - rotation_matrix.at<float>(1, 0) };
|
||||||
|
|
||||||
|
float trace;
|
||||||
|
trace = rotation_matrix.at<float>(0, 0) + rotation_matrix.at<float>(1, 1) + rotation_matrix.at<float>(2, 2);
|
||||||
|
// cout << "a11 is: " << rotation_matrix.at<float>(0, 0) << " a22 is: " << rotation_matrix.at<float>(1, 1) << " a33 is: " << rotation_matrix.at<float>(2, 2) << endl;
|
||||||
|
// cout << "x is: " << (trace - 1) / 2 << endl;
|
||||||
|
degrees = acos((trace - 1) / 2);
|
||||||
|
// cout << "Calc degrees (from function) is: " << degrees << endl;
|
||||||
|
|
||||||
|
pair<float, vector<float>> result = { degrees, myvec };
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
vector<float> find_mode(const vector<vector<float>>& vec_of_rot_axes) {
|
||||||
|
cout << "Hello! This is find_mode() function" << endl;
|
||||||
|
int index = 0, counter = 0, max_counted = 0;
|
||||||
|
vector<float> el;
|
||||||
|
for (int i = 0; i < vec_of_rot_axes.size(); i++) {
|
||||||
|
el = vec_of_rot_axes[i];
|
||||||
|
|
||||||
|
cout << "Extracted el is: ";
|
||||||
|
for (auto e : el) {
|
||||||
|
cout << " " << e << " ";
|
||||||
|
}
|
||||||
|
cout << endl;
|
||||||
|
|
||||||
|
for (const auto& vec_of_rot_axe : vec_of_rot_axes) {
|
||||||
|
if (el == vec_of_rot_axe) {
|
||||||
|
cout << "Entered if (el == vec_of_rot_axe) statement" << endl;
|
||||||
|
counter += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (counter > max_counted) {
|
||||||
|
// cout << "Found new max element. Index is: " << index << "; i is: " << i << endl;
|
||||||
|
index = i;
|
||||||
|
max_counted = counter;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
cout << "Index is: " << index << "; And arr size is: " << vec_of_rot_axes.size() << endl;
|
||||||
|
return vec_of_rot_axes[index];
|
||||||
|
}
|
||||||
|
|
||||||
|
GstFlowReturn new_preroll(GstAppSink* appsink, gpointer data) {
|
||||||
|
g_print("Got preroll!\n");
|
||||||
|
return GST_FLOW_OK;
|
||||||
|
}
|
||||||
|
|
||||||
|
GstFlowReturn new_sample(GstAppSink* appsink, gpointer data) {
|
||||||
|
static int framecount = 0;
|
||||||
|
framecount++;
|
||||||
|
|
||||||
|
std::cout << "nnew frame " << framecount << std::endl;
|
||||||
|
|
||||||
|
GstSample* sample = gst_app_sink_pull_sample(appsink);
|
||||||
|
GstCaps* caps = gst_sample_get_caps(sample);
|
||||||
|
GstBuffer* buffer = gst_sample_get_buffer(sample);
|
||||||
|
|
||||||
|
const auto& n_memory = gst_buffer_n_memory(buffer);
|
||||||
|
std::cout << "n_memory = " << n_memory << std::endl;
|
||||||
|
std::cout << "buffer->pts = " << buffer->pts << std::endl;
|
||||||
|
std::cout << "buffer->dts = " << buffer->dts << std::endl;
|
||||||
|
std::cout << "buffer->duration = " << buffer->duration << std::endl;
|
||||||
|
std::cout << "buffer->offset = " << buffer->offset << std::endl;
|
||||||
|
std::cout << "buffer->offset_end = " << buffer->offset_end << std::endl;
|
||||||
|
|
||||||
|
const GstStructure* info = gst_sample_get_info(sample);
|
||||||
|
|
||||||
|
GstMeta* gst_meta;
|
||||||
|
gpointer state = nullptr;
|
||||||
|
while ((gst_meta = gst_buffer_iterate_meta(buffer, &state))) {
|
||||||
|
if (gst_meta->info == gst_video_caption_meta_get_info()) {
|
||||||
|
auto specific_meta = (GstVideoCaptionMeta*)gst_meta;
|
||||||
|
if (specific_meta) {
|
||||||
|
auto x = (const char*)(specific_meta->data);
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoCaptionMeta]"
|
||||||
|
<< "caption = " << std::string(x, specific_meta->size)
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_video_time_code_meta_get_info()) {
|
||||||
|
auto specific_meta = (GstVideoTimeCodeMeta*)gst_meta;
|
||||||
|
if (specific_meta) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoTimeCodeMeta]"
|
||||||
|
<< " h = " << specific_meta->tc.hours
|
||||||
|
<< " m = " << specific_meta->tc.minutes
|
||||||
|
<< " s = " << specific_meta->tc.seconds
|
||||||
|
<< " f = " << specific_meta->tc.frames
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstNdiSrcMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstNdiSrcMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstNdiSinkAudioMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstNdiSinkAudioMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoCropMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoCropMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstFramePositionerMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstFramePositionerMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstMetaDfbSurface")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstMetaDfbSurface]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstSubtitleMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstSubtitleMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstRtmpMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstRtmpMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstMpegVideoMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstMpegVideoMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstSctpReceiveMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstSctpReceiveMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstSctpSendMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstSctpSendMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstCoreMediaMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstCoreMediaMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstCoreVideoMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstCoreVideoMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstAudioDownmixMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstAudioDownmixMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstAudioClippingMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstAudioClippingMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstGLSyncMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstGLSyncMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstRTPSourceMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstRTPSourceMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstRTPSourceMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstRTPSourceMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoGLTextureUploadMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoGLTextureUploadMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoRegionOfInterestMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoRegionOfInterestMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoAFDMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoAFDMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoBarMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoBarMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoMultiviewMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoMultiviewMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoOverlayCompositionMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoOverlayCompositionMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstMetaXImage")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstMetaXImage]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstProtectionMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstProtectionMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstNetControlMessageMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstNetControlMessageMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstMetaTest")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstMetaTest]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstNVMMParentMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstNVMMParentMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstAudioMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstAudioMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstAudioLevelMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstAudioLevelMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoAffineTransformationMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoAffineTransformationMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("GstVideoCodecAlphaMeta")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [GstVideoCodecAlphaMeta]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else if (gst_meta->info == gst_meta_get_info("XXX")) {
|
||||||
|
std::cout << "MetaInfo is recognized to be [XXX]"
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
std::cout << "GstMetaInfo is not recognized."
|
||||||
|
<< " info = " << gst_meta->info
|
||||||
|
<< " api = " << gst_meta->info->api
|
||||||
|
<< std::endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- Read frame and convert to opencv format ---------------
|
||||||
|
|
||||||
|
GstMapInfo map;
|
||||||
|
gst_buffer_map(buffer, &map, GST_MAP_READ);
|
||||||
|
|
||||||
|
#ifdef MY_GST_USE_OPENCV
|
||||||
|
// convert gstreamer data to OpenCV Mat, you could actually
|
||||||
|
// resolve height / width from caps...
|
||||||
|
|
||||||
|
int width = 2560;
|
||||||
|
int height = 1440;
|
||||||
|
|
||||||
|
GstStructure* s = gst_caps_get_structure(caps, 0);
|
||||||
|
gboolean res = true;
|
||||||
|
res &= gst_structure_get_int(s, "width", &width);
|
||||||
|
res &= gst_structure_get_int(s, "height", &height);
|
||||||
|
|
||||||
|
cv::Mat frame(cv::Size(width, height), CV_8UC4, (char*)map.data, cv::Mat::AUTO_STEP);
|
||||||
|
|
||||||
|
{
|
||||||
|
std::lock_guard<std::mutex> guard(g_mutex);
|
||||||
|
frameQueue.push_back(frame.clone());
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
gst_buffer_unmap(buffer, &map);
|
||||||
|
|
||||||
|
// ------------------------------------------------------------
|
||||||
|
|
||||||
|
// print dot every 30 frames
|
||||||
|
if (framecount % 30 == 0) {
|
||||||
|
g_print(".");
|
||||||
|
}
|
||||||
|
|
||||||
|
// show caps on first frame
|
||||||
|
if (framecount == 1) {
|
||||||
|
g_print("%s\n", gst_caps_to_string(caps));
|
||||||
|
}
|
||||||
|
|
||||||
|
gst_sample_unref(sample);
|
||||||
|
return GST_FLOW_OK;
|
||||||
|
}
|
||||||
|
|
||||||
|
static gboolean my_bus_callback(GstBus* bus, GstMessage* message, gpointer data) {
|
||||||
|
g_print("Got %s message\n", GST_MESSAGE_TYPE_NAME(message));
|
||||||
|
switch (GST_MESSAGE_TYPE(message)) {
|
||||||
|
case GST_MESSAGE_ERROR: {
|
||||||
|
GError* err;
|
||||||
|
gchar* debug;
|
||||||
|
|
||||||
|
gst_message_parse_error(message, &err, &debug);
|
||||||
|
g_print("Error: %s\n", err->message);
|
||||||
|
g_error_free(err);
|
||||||
|
g_free(debug);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case GST_MESSAGE_EOS: {
|
||||||
|
/* end-of-stream */
|
||||||
|
break;
|
||||||
|
} default: {
|
||||||
|
/* unhandled message */
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/* we want to be notified again the next time there is a message
|
||||||
|
* on the bus, so returning TRUE (FALSE means we want to stop watching
|
||||||
|
* for messages on the bus and our callback should not be called again)
|
||||||
|
*/
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
static gint repeats = 2;
|
||||||
|
static gchar* ndi_name = nullptr;
|
||||||
|
static gint use_gui = 0;
|
||||||
|
static gboolean beep = FALSE;
|
||||||
|
|
||||||
|
static GOptionEntry entries[] =
|
||||||
|
{
|
||||||
|
{ "repeats", 'r', 0, G_OPTION_ARG_INT, &repeats, "Среднее число повторений N", "N" },
|
||||||
|
{ "ndi-name", 'n', 0, G_OPTION_ARG_STRING, &ndi_name, "you can enter the string here (ndi-name)", "M" },
|
||||||
|
{ "gui", 'g', 0, G_OPTION_ARG_INT, &use_gui, "use gui", nullptr },
|
||||||
|
{ "beep", 'b', 0, G_OPTION_ARG_NONE, &beep, "Сигнал при выполнениии", NULL },
|
||||||
|
{ NULL }
|
||||||
|
};
|
||||||
|
|
||||||
|
int main(int argc, char* argv[]) {
|
||||||
|
//if (argc != 4)
|
||||||
|
//{
|
||||||
|
// cerr << endl << "Usage: ./mono_video path_to_vocabulary path_to_settings source_ndi" << endl;
|
||||||
|
// return 1;
|
||||||
|
//}
|
||||||
|
|
||||||
|
|
||||||
|
// INTPUT PARAMETERS: PATH_TO_EXE path_to_vocabulary path_to_settings source_ndi
|
||||||
|
// TODO: DON'T FORGET TO CHANGE THE run_gst_loopthrough_capture.cmd SCRIPT
|
||||||
|
std::cout << "argc = " << argc << std::endl;
|
||||||
|
GError* error = nullptr;
|
||||||
|
GOptionContext* context;
|
||||||
|
|
||||||
|
context = g_option_context_new("- test tree model performance");
|
||||||
|
g_option_context_add_main_entries(context, entries, "bla");
|
||||||
|
|
||||||
|
char** argv_gst;
|
||||||
|
argv_gst = new char* [2];
|
||||||
|
argv_gst[0] = new char[200];
|
||||||
|
argv_gst[1] = new char[200];
|
||||||
|
|
||||||
|
strcpy(argv_gst[0], argv[0]);
|
||||||
|
strcpy(argv_gst[1], argv[3]);
|
||||||
|
|
||||||
|
// QUESTION 1.
|
||||||
|
g_option_context_parse(context, &argc - 2, &argv_gst, &error);
|
||||||
|
|
||||||
|
g_option_context_parse(context, &argc, &argv, &error);
|
||||||
|
|
||||||
|
if (!ndi_name) {
|
||||||
|
std::cout << "ndi-name is not provided" << std::endl;
|
||||||
|
ndi_name = (char*)malloc(sizeof(char) * 100);
|
||||||
|
// ndi_name = "DESKTOP - O5PNOBN(Test Pattern)";
|
||||||
|
std::cout << "ndi-name (default) = '" << ndi_name << "'" << std::endl;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
std::cout << "ndi-name = '" << ndi_name << "'" << std::endl;
|
||||||
|
}
|
||||||
|
|
||||||
|
GstStateChangeReturn ret;
|
||||||
|
|
||||||
|
int fake_argc = 1;
|
||||||
|
gst_init(&fake_argc, &argv);
|
||||||
|
|
||||||
|
std::stringstream ss;
|
||||||
|
ss << "ndisrc ndi-name=\"" << ndi_name << "\" ! ndisrcdemux name=demux "
|
||||||
|
<< "demux.video ! queue ! tee name=my_tee "
|
||||||
|
<< "my_tee. ! queue ! videoconvert ! autovideosink "
|
||||||
|
<< "my_tee. ! queue ! videoconvert ! appsink name=my_sink";
|
||||||
|
std::string my_pipeline = ss.str();
|
||||||
|
|
||||||
|
|
||||||
|
GstElement* pipeline = gst_parse_launch(my_pipeline.c_str(), nullptr);
|
||||||
|
|
||||||
|
|
||||||
|
/* get sink */
|
||||||
|
GstElement* sink = gst_bin_get_by_name(GST_BIN(pipeline), "my_sink");
|
||||||
|
|
||||||
|
gst_app_sink_set_emit_signals((GstAppSink*)sink, true);
|
||||||
|
gst_app_sink_set_drop((GstAppSink*)sink, true);
|
||||||
|
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
|
||||||
|
GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample };
|
||||||
|
gst_app_sink_set_callbacks(GST_APP_SINK(sink), &callbacks, NULL, NULL);
|
||||||
|
|
||||||
|
GstBus* bus;
|
||||||
|
guint bus_watch_id;
|
||||||
|
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
|
||||||
|
bus_watch_id = gst_bus_add_watch(bus, my_bus_callback, NULL);
|
||||||
|
gst_object_unref(bus);
|
||||||
|
|
||||||
|
/* Start playing */
|
||||||
|
ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
|
||||||
|
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||||
|
g_printerr("Unable to set the pipeline to the playing state.\n");
|
||||||
|
gst_object_unref(pipeline);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
#ifdef MY_GST_USE_OPENCV
|
||||||
|
auto lambda_1 = [](char** argv) {
|
||||||
|
|
||||||
|
// --------------------------------- SLAM SYSTEM VARIABLES ---------------------------------
|
||||||
|
|
||||||
|
// Create SLAM system. It initializes all system threads and gets ready to process frames.
|
||||||
|
ORB_SLAM3::System SLAM(argv[1], argv[2], ORB_SLAM3::System::MONOCULAR, true);
|
||||||
|
|
||||||
|
std::printf("SLAM system initialized\n");
|
||||||
|
|
||||||
|
// Main loop
|
||||||
|
cv::Mat frame;
|
||||||
|
|
||||||
|
int cnt = 0;
|
||||||
|
const double time_step = 1.0;
|
||||||
|
double ts = 0;
|
||||||
|
char matrix_name[100];
|
||||||
|
vector<float> vec_of_deg, values;
|
||||||
|
vector<vector<float>> vec_of_rot_axis;
|
||||||
|
|
||||||
|
// ---- INITIALIZE FOR THE PROCESSING OF AXIS LOSS AND FOR THE AXIS VECTOR INFORMATION ----
|
||||||
|
float skew1 = 0.0;
|
||||||
|
float DIFF_TO_CENTER = 0.0;
|
||||||
|
float curr_deg; // later I'll assign the exact value
|
||||||
|
vector<float> curr_vec;
|
||||||
|
vector<float> mode1, mode2;
|
||||||
|
|
||||||
|
vector<vector<float>> accum, accum2;
|
||||||
|
int counter2, j = 0;
|
||||||
|
std::cout << "J is: " << j;
|
||||||
|
vector<float> mode_vec, mode_vec2; // 2 вектора, для аккумуляции слева и справа
|
||||||
|
// zero_flag - индикатор, что текущий элемент пошёл в обратную сторону (около нуля)
|
||||||
|
// mirror_flag - значения на данный момент должны отражаться
|
||||||
|
bool zero_flag, mirror_flag = false;
|
||||||
|
float mirror_point = 0.0;
|
||||||
|
|
||||||
|
// --------------------------------- SLAM SYSTEM VARIABLES ---------------------------------
|
||||||
|
|
||||||
|
// Let's do two steps outside the loop.
|
||||||
|
for (int i = 1; i <= 2; i++) {
|
||||||
|
|
||||||
|
if (use_gui) {
|
||||||
|
cv::namedWindow("preview", 1);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// cv::namedWindow("no preview", 1);
|
||||||
|
}
|
||||||
|
cv::Mat frame;
|
||||||
|
|
||||||
|
char* buffer = nullptr;
|
||||||
|
|
||||||
|
// EXTRACTING FRAME HERE.
|
||||||
|
{
|
||||||
|
std::lock_guard<std::mutex> guard(g_mutex);
|
||||||
|
if (frameQueue.size() > 0) {
|
||||||
|
frame = frameQueue.front();
|
||||||
|
frameQueue.pop_front();
|
||||||
|
std::cout << "we have a frame to process..." << std::endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
cv::Mat Tcw = SLAM.TrackMonocular(frame, ts, vector<ORB_SLAM3::IMU::Point>(), "");
|
||||||
|
cv::Mat Rot(3, 3, CV_32F, 0.0);
|
||||||
|
std::cout << Tcw << std::endl;
|
||||||
|
|
||||||
|
if (!Tcw.empty()) {
|
||||||
|
sprintf(matrix_name, "matrix%d", cnt);
|
||||||
|
extract_rot(Rot, Tcw);
|
||||||
|
// cout << "Extracted rotation matrix is: " << Rot;
|
||||||
|
auto deg_vec = extract_deg(Rot);
|
||||||
|
|
||||||
|
// QUESTION 2.
|
||||||
|
curr_deg = -deg_vec.first * 57.29;
|
||||||
|
// TODO: Invert curr_vec too. (put the minus sign to each element). (You can define the - operator fot the vector).
|
||||||
|
curr_vec = deg_vec.second;
|
||||||
|
cout << "Successfully created curr_deg and curr_vec" << endl;
|
||||||
|
|
||||||
|
// LET'S DEFINE CONSTANT TO ZERO OUT THE START
|
||||||
|
if (i == 1) {
|
||||||
|
DIFF_TO_CENTER = 0.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
vec_of_deg.push_back(curr_deg - DIFF_TO_CENTER);
|
||||||
|
vec_of_rot_axis.push_back(curr_vec);
|
||||||
|
values.push_back(curr_deg - DIFF_TO_CENTER);
|
||||||
|
cout << "Successfully pushed to the vectors " << endl;
|
||||||
|
|
||||||
|
//cout << curr_deg - DIFF_TO_CENTER << " " << curr_vec[0] << " " << curr_vec[1] << " " << curr_vec[2] << endl;
|
||||||
|
// SEND THE RESULT THROUGH OSC
|
||||||
|
//outfile << curr_deg - DIFF_TO_CENTER << " " << curr_vec[0] << " " << curr_vec[1] << " " << curr_vec[2] << endl;
|
||||||
|
cout << "Successfully written to the file" << endl;
|
||||||
|
j++;
|
||||||
|
}
|
||||||
|
cnt++;
|
||||||
|
ts += time_step;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
//if (use_gui) {
|
||||||
|
// cv::namedWindow("preview", cv::WINDOW_AUTOSIZE);
|
||||||
|
//}
|
||||||
|
//else {
|
||||||
|
// // cv::namedWindow("no preview", 1);
|
||||||
|
//}
|
||||||
|
cv::Mat frame;
|
||||||
|
|
||||||
|
char* buffer = nullptr;
|
||||||
|
// {
|
||||||
|
// int length;
|
||||||
|
// std::cout << "trying to open example.bin" << std::endl;
|
||||||
|
// std::ifstream is;
|
||||||
|
// is.open("example.bin", std::ios::binary);
|
||||||
|
// is.seekg(0, std::ios::end);
|
||||||
|
// length = is.tellg();
|
||||||
|
// is.seekg(0, std::ios::beg);
|
||||||
|
// buffer = new char [length];
|
||||||
|
// is.read(buffer, length);
|
||||||
|
// is.close();
|
||||||
|
// frame = cv::Mat(cv::Size(1920, 1080), CV_8UC4, (char*)buffer, cv::Mat::AUTO_STEP);
|
||||||
|
// }
|
||||||
|
{
|
||||||
|
std::lock_guard<std::mutex> guard(g_mutex);
|
||||||
|
if (frameQueue.size() > 0) {
|
||||||
|
frame = frameQueue.front();
|
||||||
|
frameQueue.pop_front();
|
||||||
|
std::cout << "we have a frame to process..." << std::endl;
|
||||||
|
|
||||||
|
if (!frame.empty()) {
|
||||||
|
|
||||||
|
cv::Mat Tcw = SLAM.TrackMonocular(frame, ts, vector<ORB_SLAM3::IMU::Point>(), "");
|
||||||
|
cv::Mat Rot(3, 3, CV_32F, 0.0);
|
||||||
|
std::cout << Tcw << std::endl;
|
||||||
|
if (!Tcw.empty()) {
|
||||||
|
sprintf(matrix_name, "matrix%d", cnt);
|
||||||
|
extract_rot(Rot, Tcw);
|
||||||
|
// cout << "Extracted rotation matrix is: " << Rot;
|
||||||
|
// Extract the degree and the vector from the rotation matrix.
|
||||||
|
auto deg_vec = extract_deg(Rot); // returns a degree and a vector of rotation.
|
||||||
|
|
||||||
|
float new_deg = -deg_vec.first * 57.29 - DIFF_TO_CENTER;
|
||||||
|
vector<float> new_vec = deg_vec.second;
|
||||||
|
cout << "Successfully created curr_deg and curr_vec" << endl;
|
||||||
|
|
||||||
|
vec_of_deg.push_back(new_deg);
|
||||||
|
vec_of_rot_axis.push_back(new_vec);
|
||||||
|
j++;
|
||||||
|
cout << "Pushed to the vectors. Line 207" << endl;
|
||||||
|
|
||||||
|
// ---- II PART OF THE PROCESSING ----
|
||||||
|
|
||||||
|
// TODO: II PART OF PROCESSING MIRRORED FIRST CHANGE, BUT NOT THE REST.
|
||||||
|
|
||||||
|
// Если текущий градус больше epsilon = 5, то zero_flag = false
|
||||||
|
// Can cause a problem, when accumulating values after turning on the zero_flag.
|
||||||
|
// TODO: accum2 is full when the zero_flag enables, which is bad. work on that.
|
||||||
|
if (zero_flag) {
|
||||||
|
if ((vec_of_deg[j - 1] < -5 || vec_of_deg[j - 1] > 5) && accum2.size() == 5) {
|
||||||
|
zero_flag = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (zero_flag) { cout << "Zero flag is: true" << endl; }
|
||||||
|
else { cout << "Zero flag is: false" << endl; }
|
||||||
|
|
||||||
|
// Если нет zero_flag, а в accum2 что-то есть, то опустошим его.
|
||||||
|
if (!(zero_flag) && !accum2.empty()) { accum2 = {}; }
|
||||||
|
|
||||||
|
// Сохраняем последние 5 значений векторов
|
||||||
|
if (!zero_flag) {
|
||||||
|
cout << "Line 211 ok..." << endl;
|
||||||
|
if (accum.size() == 5) {
|
||||||
|
cout << "Accum size = 5." << endl;
|
||||||
|
accum.erase(accum.begin());
|
||||||
|
cout << "Line 215 ok..." << endl;
|
||||||
|
accum.push_back(vec_of_rot_axis[j - 1]);
|
||||||
|
cout << "Line 217 ok..." << endl;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
cout << "Accum size != 5." << endl;
|
||||||
|
cout << "j is: " << j << " len of vec_of_rot_axis is: " << vec_of_rot_axis.size() << endl;
|
||||||
|
accum.push_back(vec_of_rot_axis[j - 1]);
|
||||||
|
cout << "Line 223 ok..." << endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Найдем элемент, который начал расти, а не убывать около нуля
|
||||||
|
if (!zero_flag) {
|
||||||
|
if (vec_of_deg[j - 1] > -5 && vec_of_deg[j - 1] < 5) {
|
||||||
|
// Если нынешний элемент уже не меньше предыдущего, а предыдущая разность тоже около нуля, при этом абсолютная разность между градусами больше, чем 0.01
|
||||||
|
if (abs(vec_of_deg[j - 1]) >= abs(vec_of_deg[j - 2]) && (abs(vec_of_deg[j - 2] - vec_of_deg[j - 3]) < 10) && (abs(vec_of_deg[j - 1] - vec_of_deg[j - 2]) > .3)) {
|
||||||
|
zero_flag = true;
|
||||||
|
cout << "Line 233 and 232 ok..." << endl;
|
||||||
|
|
||||||
|
}
|
||||||
|
// else {
|
||||||
|
// zero_flag = false;
|
||||||
|
// }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
cout << "Accum size is: " << accum.size() << endl;
|
||||||
|
cout << "Accum2 size is: " << accum2.size() << endl;
|
||||||
|
if (zero_flag) {
|
||||||
|
// Если набрали 5 элементов
|
||||||
|
cout << "Entered in zero_flag if..." << endl;
|
||||||
|
cout << "Accum2.size() is: " << accum2.size() << endl;
|
||||||
|
if (accum2.size() == 5 && accum.size() == 5) {
|
||||||
|
// Имеем массивы векторов. Найдём их моды и сравним.
|
||||||
|
cout << "Accum size: " << accum.size() << endl;
|
||||||
|
cout << "Accum2 size: " << accum2.size() << endl;
|
||||||
|
mode1 = find_mode(accum);
|
||||||
|
mode2 = find_mode(accum2);
|
||||||
|
cout << "Line 246 and 245 ok..." << endl;
|
||||||
|
|
||||||
|
bool compar_res = mode1 == mode2;
|
||||||
|
cout << "Line 250 ok..." << endl;
|
||||||
|
// Если градусы около нуля, а значения векторов поменялись, то отражаем
|
||||||
|
// Input data leave it as it as, but the output data has to be processed.
|
||||||
|
if (!(compar_res)) {
|
||||||
|
// Если мы нашли ту самую точку, то отразим точки, которые мы накопили, и прибавим к ним точку
|
||||||
|
// отражения, а также изменим точку отражения, и изменим флаг mirror_flag = True
|
||||||
|
cout << "Нашли ту самую точку!" << endl;
|
||||||
|
// mirror_point += values[j-6];
|
||||||
|
// cout << "Mirror point after: " << mirror_point << endl;
|
||||||
|
cout << "Line 255 ok..." << endl;
|
||||||
|
|
||||||
|
if (mirror_flag) {
|
||||||
|
mirror_flag = false;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
mirror_flag = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// for (int i = j-6; i < j-1; i++){
|
||||||
|
// values[i] = -values[i] + mirror_point;
|
||||||
|
// }
|
||||||
|
// cout << "Lines 263 and 264 are ok" << "j is: " << j << endl;
|
||||||
|
|
||||||
|
}
|
||||||
|
accum2 = {};
|
||||||
|
cout << "Making zero flag false..." << endl;
|
||||||
|
zero_flag = false;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (accum2.size() < 5) {
|
||||||
|
accum2.push_back(vec_of_rot_axis[j - 1]);
|
||||||
|
cout << "Line 274 ok..." << endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Сохраняем значения...
|
||||||
|
if (mirror_flag) {
|
||||||
|
; cout << "Mirror flag is on;" << " vec_of_deg size: " << vec_of_deg.size() << "; j is: " << j << endl;
|
||||||
|
values.push_back(-vec_of_deg[j - 1] + mirror_point);
|
||||||
|
// cout << "Line 281 ok..." << endl;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
; cout << "Mirror flag is off" << " vec_of_deg size: " << vec_of_deg.size() << "; j is: " << j << endl;
|
||||||
|
values.push_back(vec_of_deg[j - 1]);
|
||||||
|
// cout << "Line 284 ok..." << endl;
|
||||||
|
}
|
||||||
|
cout << "Processed value is: " << values[j - 1] << endl; cout << " " << endl;
|
||||||
|
|
||||||
|
// --------- I PART OF THE PROCESSING ---------
|
||||||
|
// values[j-1] += skew1;
|
||||||
|
// float diff = (values[j-2] - values[j-1]);
|
||||||
|
// cout << "New deg is: " << new_deg << "Diff is: " << diff << endl;
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// // Если разница больше 10, то скорее всего произошла потеря.
|
||||||
|
// if (abs(diff) > 10) {
|
||||||
|
// cout << "Diff is more than 10; Correcting... " << endl;
|
||||||
|
// values[j-1] += diff;
|
||||||
|
// skew1 += diff;
|
||||||
|
// }
|
||||||
|
// --------- I PART OF THE PROCESSING ---------
|
||||||
|
|
||||||
|
// Запись в файл.
|
||||||
|
//outfile << values[j - 1] << " " << new_vec[0] << " " << new_vec[1] << " " << new_vec[2] << " " << cnt << endl;
|
||||||
|
// cout << "Successfully written to the file" << endl;
|
||||||
|
|
||||||
|
// Выполнить отправку в протокол OSC.
|
||||||
|
//cv::Vec3d res(1., 1., 1.);
|
||||||
|
//std::cout << "defined Vector is: " << res[0] << res[1] << res[2] << std::endl;
|
||||||
|
std::cout << "message received!" << std::endl;
|
||||||
|
UdpTransmitSocket transmitSocket(IpEndpointName(ADDRESS, PORT));
|
||||||
|
|
||||||
|
char buffer[OUTPUT_BUFFER_SIZE];
|
||||||
|
osc::OutboundPacketStream p(buffer, OUTPUT_BUFFER_SIZE);
|
||||||
|
|
||||||
|
std::string str;
|
||||||
|
str = std::to_string(values[j-1]) + " " + std::to_string(new_vec[0]) + " " + std::to_string(new_vec[1]) + " " + std::to_string(new_vec[2]);
|
||||||
|
char msg[40];
|
||||||
|
strcpy(msg, str.c_str());
|
||||||
|
|
||||||
|
p << osc::BeginBundleImmediate
|
||||||
|
<< osc::BeginMessage("/test3") << msg << osc::EndMessage
|
||||||
|
/* << osc::BeginMessage("/test2")
|
||||||
|
<< true << 24 << (float)10.8 << "world" << osc::EndMessage*/
|
||||||
|
<< osc::EndBundle;
|
||||||
|
|
||||||
|
//p << osc::BeginBundleImmediate
|
||||||
|
// << osc::BeginMessage("/test1")
|
||||||
|
// //res[0] << res[1] << res[2] <<
|
||||||
|
// << true << "blah" << osc::EndMessage << osc::EndBundle;
|
||||||
|
////<< osc::BeginMessage("/test2")
|
||||||
|
////<< true << 24 << (float)10.8 << "world" << osc::EndMessage
|
||||||
|
|
||||||
|
transmitSocket.Send(p.Data(), p.Size());
|
||||||
|
std::cout << "Message sent!" << std::endl;
|
||||||
|
|
||||||
|
// ---- II PART OF THE PROCESSING ----
|
||||||
|
|
||||||
|
curr_deg = new_deg;
|
||||||
|
curr_vec = new_vec;
|
||||||
|
}
|
||||||
|
cnt++;
|
||||||
|
ts += time_step;
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
std::cout << "Don't have any frames yet ..." << std::endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//if (use_gui) {
|
||||||
|
// if (!frame.empty()) {
|
||||||
|
// cv::Mat edges;
|
||||||
|
// cvtColor(frame, edges, cv::COLOR_BGR2BGRA);
|
||||||
|
// cv::imshow("preview", frame);
|
||||||
|
// int key = cv::waitKey(10);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
//}
|
||||||
|
delete[] buffer;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::printf("End of video\n");
|
||||||
|
// Stop all threads
|
||||||
|
SLAM.Shutdown();
|
||||||
|
|
||||||
|
std::printf("Done.\n");
|
||||||
|
|
||||||
|
};
|
||||||
|
|
||||||
|
std::cout << "Lambda function defined ..." << std::endl;
|
||||||
|
|
||||||
|
char** argv_orb;
|
||||||
|
argv_orb = new char* [3];
|
||||||
|
argv_orb[0] = new char[200];
|
||||||
|
argv_orb[1] = new char[200];
|
||||||
|
argv_orb[2] = new char[200];
|
||||||
|
|
||||||
|
strcpy(argv_orb[0], argv[0]);
|
||||||
|
strcpy(argv_orb[1], argv[1]);
|
||||||
|
strcpy(argv_orb[2], argv[2]);
|
||||||
|
|
||||||
|
|
||||||
|
std::thread t1(lambda_1, argv_orb);
|
||||||
|
|
||||||
|
std::cout << "Lambda function works ini the thread t1 ..." << std::endl;
|
||||||
|
|
||||||
|
|
||||||
|
bool is_terminated = false;
|
||||||
|
while (!is_terminated) {
|
||||||
|
// g_main_iteration(false);
|
||||||
|
g_main_context_iteration(NULL, false);
|
||||||
|
}
|
||||||
|
t1.join();
|
||||||
|
#else
|
||||||
|
bool is_terminated = false;
|
||||||
|
while (!is_terminated) {
|
||||||
|
g_main_context_iteration(NULL, false);
|
||||||
|
}
|
||||||
|
#endif // MY_GST_USE_OPENCV
|
||||||
|
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
|
||||||
|
gst_object_unref(GST_OBJECT(pipeline));
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
40
gst_get_ndi.h
Normal file
40
gst_get_ndi.h
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
// Shared declarations and globals for the GStreamer/NDI capture pipeline
// (included by the capture translation unit).
// NOTE(review): this header has no include guard / #pragma once — confirm it
// is only ever included from a single .cpp, or add a guard.
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <sstream>
#include <format>   // NOTE(review): <format> is C++20, but CMake sets CMAKE_CXX_STANDARD 17 — confirm this compiles on the target toolchain.
#include <gst/video/gstvideometa.h>
#include <cstdlib>
#include <deque>
#include <iostream>
#include <mutex>
#include <fstream>

// Compile-time switch: when defined, the OpenCV/SLAM processing path is built.
#define MY_GST_USE_OPENCV

#ifdef MY_GST_USE_OPENCV
#include "opencv2/opencv.hpp"

// TODO: use synchronized deque
// g_mutex guards frameQueue: the appsink callback pushes decoded frames and
// the processing thread pops them under this lock.
// NOTE(review): these are *definitions* (not extern declarations) placed in a
// header; including this header from more than one translation unit would
// violate the ODR — consider `inline` (C++17) or moving them to a .cpp.
std::mutex g_mutex;
std::deque<cv::Mat> frameQueue;
#endif // MY_GST_USE_OPENCV

// appsink callback: invoked with the preroll sample (first buffer before PLAYING).
GstFlowReturn new_preroll(GstAppSink* appsink, gpointer data);

// appsink callback: invoked for every new sample while the pipeline is PLAYING.
GstFlowReturn new_sample(GstAppSink* appsink, gpointer data);

// GstBus watch callback installed via gst_bus_add_watch() in main().
static gboolean my_bus_callback(GstBus* bus, GstMessage* message, gpointer data);

// Command-line option storage, filled by GOption parsing via `entries` below.
static gint repeats = 2;            // -r / --repeats: average repetition count N
static gchar* ndi_name = nullptr;   // -n / --ndi-name: NDI stream name string
static gint use_gui = 0;            // -g / --gui: non-zero enables the preview window
static gboolean beep = FALSE;       // -b / --beep: beep on completion

// GOption table mapping the command-line flags to the globals above.
// (Description strings are user-facing runtime text and are left as-is.)
static GOptionEntry entries[] =
{
    { "repeats", 'r', 0, G_OPTION_ARG_INT, &repeats, "Среднее число повторений N", "N" },
    { "ndi-name", 'n', 0, G_OPTION_ARG_STRING, &ndi_name, "you can enter the string here (ndi-name)", "M" },
    { "gui", 'g', 0, G_OPTION_ARG_INT, &use_gui, "use gui", nullptr },
    { "beep", 'b', 0, G_OPTION_ARG_NONE, &beep, "Сигнал при выполнениии", NULL },
    { NULL }
};
|
||||||
243
gst_ndi.cpp
Normal file
243
gst_ndi.cpp
Normal file
@@ -0,0 +1,243 @@
|
|||||||
|
#include "gst_ndi.h"
|
||||||
|
#include <cstdlib>
|
||||||
|
#include <iostream>
|
||||||
|
|
||||||
|
int main(int argc, char *argv[]) {
|
||||||
|
CustomData data;
|
||||||
|
GstBus *bus;
|
||||||
|
GstMessage *msg;
|
||||||
|
GstStateChangeReturn ret;
|
||||||
|
gboolean terminate = FALSE;
|
||||||
|
|
||||||
|
/* Initialize GStreamer */
|
||||||
|
gst_init (&argc, &argv);
|
||||||
|
|
||||||
|
GstElement *my_src = nullptr;
|
||||||
|
my_src = gst_element_factory_make ("ndivideosrc", "my_ndi_src");
|
||||||
|
std::cout << my_src << std::endl;
|
||||||
|
if (!my_src) {
|
||||||
|
std::cerr << "Can't create ndivideosrc" << std::endl;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
std::cerr << "ndivideosrc is created with success!" << std::endl;
|
||||||
|
|
||||||
|
GstElement *my_sink = nullptr;
|
||||||
|
my_sink = gst_element_factory_make ("autovideosink", "my_sink");
|
||||||
|
std::cout << my_sink << std::endl;
|
||||||
|
if (!my_sink) {
|
||||||
|
std::cerr << "Can't create autovideosink" << std::endl;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
std::cerr << "autovideosink is created with success!" << std::endl;
|
||||||
|
|
||||||
|
GstElement *pipeline = nullptr;
|
||||||
|
pipeline = gst_pipeline_new ("test-pipeline");
|
||||||
|
std::cout << my_sink << std::endl;
|
||||||
|
if (!pipeline) {
|
||||||
|
std::cerr << "Can't create pipeline" << std::endl;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
std::cerr << "pipeline is created with success!" << std::endl;
|
||||||
|
|
||||||
|
gst_bin_add_many (GST_BIN (pipeline), my_src, my_sink, NULL);
|
||||||
|
std::cout << "here!" << std::endl;
|
||||||
|
if (!gst_element_link_many (my_src, my_sink, NULL)) {
|
||||||
|
g_printerr ("Elements could not be linked.\n");
|
||||||
|
gst_object_unref (pipeline);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
std::cout << "here -2 !" << std::endl;
|
||||||
|
|
||||||
|
// g_object_set (my_src, "ndi-name", "DESKTOP-O5PNOBN (NVIDIA Quadro RTX 5000 1)", NULL);
|
||||||
|
// g_object_set (my_src, "ndi-name", "DESKTOP-O5PNOBN (Test Pattern)", NULL);
|
||||||
|
g_object_set (my_src, "ndi-name", "DESKTOP-HI0BRMI (Test Pattern)", NULL);
|
||||||
|
|
||||||
|
/* Start playing */
|
||||||
|
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
|
||||||
|
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||||
|
g_printerr ("Unable to set the pipeline to the playing state.\n");
|
||||||
|
gst_object_unref (pipeline);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Listen to the bus */
|
||||||
|
bus = gst_element_get_bus (pipeline);
|
||||||
|
do {
|
||||||
|
msg = gst_bus_timed_pop_filtered(
|
||||||
|
bus,
|
||||||
|
GST_CLOCK_TIME_NONE,
|
||||||
|
static_cast<GstMessageType>(GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_TAG)
|
||||||
|
);
|
||||||
|
/* Parse message */
|
||||||
|
if (msg != NULL) {
|
||||||
|
GError *err;
|
||||||
|
gchar *debug_info;
|
||||||
|
switch (GST_MESSAGE_TYPE (msg)) {
|
||||||
|
case GST_MESSAGE_ERROR:
|
||||||
|
gst_message_parse_error (msg, &err, &debug_info);
|
||||||
|
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
|
||||||
|
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
|
||||||
|
g_clear_error (&err);
|
||||||
|
g_free (debug_info);
|
||||||
|
terminate = TRUE;
|
||||||
|
break;
|
||||||
|
case GST_MESSAGE_EOS: {
|
||||||
|
g_print("End-Of-Stream reached.\n");
|
||||||
|
terminate = TRUE;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case GST_MESSAGE_TAG: {
|
||||||
|
GstTagList *tags = NULL;
|
||||||
|
gst_message_parse_tag (msg, &tags);
|
||||||
|
g_print ("Got tags from element %s:\n", GST_OBJECT_NAME (msg->src));
|
||||||
|
std::cout << "tag" << std::endl;
|
||||||
|
// gst_tag_list_foreach (tags, print_one_tag, NULL);
|
||||||
|
g_print ("\n");
|
||||||
|
gst_tag_list_unref (tags);
|
||||||
|
gst_message_unref (msg);
|
||||||
|
}
|
||||||
|
case GST_MESSAGE_STATE_CHANGED:
|
||||||
|
/* We are only interested in state-changed messages from the pipeline */
|
||||||
|
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
|
||||||
|
GstState old_state, new_state, pending_state;
|
||||||
|
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
|
||||||
|
g_print ("Pipeline state changed from %s to %s:\n",
|
||||||
|
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
/* We should not reach here */
|
||||||
|
g_printerr ("Unexpected message received.\n");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
gst_message_unref (msg);
|
||||||
|
}
|
||||||
|
} while (!terminate);
|
||||||
|
|
||||||
|
/* Free resources */
|
||||||
|
if (msg != NULL)
|
||||||
|
gst_message_unref (msg);
|
||||||
|
gst_object_unref (bus);
|
||||||
|
gst_element_set_state (pipeline, GST_STATE_NULL);
|
||||||
|
gst_object_unref (pipeline);
|
||||||
|
return 0;
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
|
||||||
|
/* Create the elements */
|
||||||
|
data.source = gst_element_factory_make ("uridecodebin", "source");
|
||||||
|
data.convert = gst_element_factory_make ("audioconvert", "convert");
|
||||||
|
data.resample = gst_element_factory_make ("audioresample", "resample");
|
||||||
|
data.sink = gst_element_factory_make ("autoaudiosink", "sink");
|
||||||
|
|
||||||
|
/* Create the empty pipeline */
|
||||||
|
data.pipeline = gst_pipeline_new ("test-pipeline");
|
||||||
|
|
||||||
|
if (!data.pipeline || !data.source || !data.convert || !data.resample || !data.sink) {
|
||||||
|
g_printerr ("Not all elements could be created.\n");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Build the pipeline. Note that we are NOT linking the source at this
|
||||||
|
* point. We will do it later. */
|
||||||
|
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert, data.resample, data.sink, NULL);
|
||||||
|
if (!gst_element_link_many (data.convert, data.resample, data.sink, NULL)) {
|
||||||
|
g_printerr ("Elements could not be linked.\n");
|
||||||
|
gst_object_unref (data.pipeline);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Set the URI to play */
|
||||||
|
g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
|
||||||
|
|
||||||
|
/* Connect to the pad-added signal */
|
||||||
|
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
|
||||||
|
|
||||||
|
/* Start playing */
|
||||||
|
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
|
||||||
|
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||||
|
g_printerr ("Unable to set the pipeline to the playing state.\n");
|
||||||
|
gst_object_unref (data.pipeline);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
/* Free resources */
|
||||||
|
gst_object_unref (bus);
|
||||||
|
gst_element_set_state (data.pipeline, GST_STATE_NULL);
|
||||||
|
gst_object_unref (data.pipeline);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function will be called by the pad-added signal */
|
||||||
|
/* pad-added signal handler: when uridecodebin exposes a new source pad,
 * link it to the converter's sink pad — but only if the pad carries raw
 * audio and the converter is not linked yet.  All acquired refs (sink pad,
 * pad caps) are released on every path. */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
    GstCaps *new_pad_caps = NULL;

    g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

    if (gst_pad_is_linked (sink_pad)) {
        /* Converter already connected — nothing to do for this pad. */
        g_print ("We are already linked. Ignoring.\n");
    } else {
        /* Inspect the new pad's media type before attempting a link. */
        new_pad_caps = gst_pad_get_current_caps (new_pad);
        GstStructure *new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
        const gchar *new_pad_type = gst_structure_get_name (new_pad_struct);

        if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
            g_print ("It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
        } else {
            /* Raw audio: try to hook it up to the converter. */
            GstPadLinkReturn link_ret = gst_pad_link (new_pad, sink_pad);
            if (GST_PAD_LINK_FAILED (link_ret)) {
                g_print ("Type is '%s' but link failed.\n", new_pad_type);
            } else {
                g_print ("Link succeeded (type '%s').\n", new_pad_type);
            }
        }
    }

    /* Unreference the new pad's caps, if we got them. */
    if (new_pad_caps != NULL)
        gst_caps_unref (new_pad_caps);

    /* Unreference the sink pad. */
    gst_object_unref (sink_pad);
}
|
||||||
|
|
||||||
|
/* Print every value stored under a single tag of a GstTagList in a
 * human-readable, aligned form ("\t<tag name> : <value>").
 *
 * Handles string, uint, double, boolean, and buffer-typed values; any other
 * type falls through to printing just its GType name.
 * NOTE(review): the DATE_TIME branch is entirely commented out, so date/time
 * tags currently produce *no* output at all — confirm that is intended.
 *
 * list      : tag list to read from (not modified).
 * tag       : name of the tag whose values are printed.
 * user_data : unused (present to match the GstTagForeachFunc shape). */
void print_one_tag(GstTagList *list, gchar *tag, gpointer user_data) {
    int i, num;
    /* A tag may hold several values; iterate over all of them. */
    num = gst_tag_list_get_tag_size (list, tag);
    for (i = 0; i < num; ++i) {
        const GValue *val;
        /* Note: when looking for specific tags, use the gst_tag_list_get_xyz() API,
         * we only use the GValue approach here because it is more generic */
        val = gst_tag_list_get_value_index (list, tag, i);
        if (G_VALUE_HOLDS_STRING (val)) {
            g_print ("\t%20s : %s\n", tag, g_value_get_string (val));
        } else if (G_VALUE_HOLDS_UINT (val)) {
            g_print ("\t%20s : %u\n", tag, g_value_get_uint (val));
        } else if (G_VALUE_HOLDS_DOUBLE (val)) {
            g_print ("\t%20s : %g\n", tag, g_value_get_double (val));
        } else if (G_VALUE_HOLDS_BOOLEAN (val)) {
            g_print ("\t%20s : %s\n", tag,
                (g_value_get_boolean (val)) ? "true" : "false");
        } else if (GST_VALUE_HOLDS_BUFFER (val)) {
            /* For buffer-typed tags (e.g. embedded images) only report the size. */
            GstBuffer *buf = gst_value_get_buffer (val);
            guint buffer_size = gst_buffer_get_size (buf);

            g_print ("\t%20s : buffer of size %u\n", tag, buffer_size);
        } else if (GST_VALUE_HOLDS_DATE_TIME (val)) {
            // GstDateTime *dt = g_value_get_boxed(val);
            // gchar *dt_str = gst_date_time_to_iso8601_string (dt);
            // g_print ("\t%20s : %s\n", tag, dt_str);
            // g_free (dt_str);
        } else {
            /* Unknown value type: print only the GType name. */
            g_print ("\t%20s : tag of type '%s'\n", tag, G_VALUE_TYPE_NAME (val));
        }
    }
}
|
||||||
21
gst_ndi.h
Normal file
21
gst_ndi.h
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
#ifndef MYGST_GST_NDI_H
#define MYGST_GST_NDI_H

#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
    GstElement *pipeline;   // top-level pipeline bin
    GstElement *source;     // uridecodebin (pads appear dynamically)
    GstElement *convert;    // audioconvert
    GstElement *resample;   // audioresample
    GstElement *sink;       // autoaudiosink
} CustomData;

/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
// NOTE(review): this declaration is `static` with (const GstTagList*, const gchar*)
// parameters, but the definition in gst_ndi.cpp is non-static and takes
// (GstTagList*, gchar*) — the signatures do not match; confirm and reconcile.
static void print_one_tag (const GstTagList * list, const gchar * tag, gpointer user_data);

// NOTE(review): declaring main() in a header is unusual and unnecessary.
int main(int argc, char *argv[]);

#endif //MYGST_GST_NDI_H
|
||||||
162
my_ndi.cpp
Normal file
162
my_ndi.cpp
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
#include <cstdio>
|
||||||
|
#include <chrono>
|
||||||
|
#include <string>
|
||||||
|
#include <iostream>
|
||||||
|
#include <csignal>
|
||||||
|
#include <Processing.NDI.Lib.h>
|
||||||
|
|
||||||
|
#ifdef _WIN32
|
||||||
|
#ifdef _WIN64
|
||||||
|
#pragma comment(lib, "Processing.NDI.Lib.x64.lib")
|
||||||
|
#else // _WIN64
|
||||||
|
#pragma comment(lib, "Processing.NDI.Lib.x86.lib")
|
||||||
|
#endif // _WIN64
|
||||||
|
#endif // _WIN32
|
||||||
|
|
||||||
|
using namespace std::chrono;
|
||||||
|
|
||||||
|
// Global shutdown flag: set by signalHandler, polled by the loops in main().
static bool is_terminated = false;

// Signal handler for SIGINT (Ctrl+C) and SIGTERM (systemd stop):
// requests a graceful shutdown by raising is_terminated.
// NOTE(review): std::cout is not async-signal-safe and `bool` is not
// sig_atomic_t — consider `volatile std::sig_atomic_t` / std::atomic and
// moving the print out of the handler.  Also note the message has no newline.
void signalHandler(int signum) {
    // Ctrl+C -> SIGINT
    // SystemD -> SIGTERM
    std::cout << "Interrupt signal (" << signum << ") received.";
    is_terminated = true;
};
|
||||||
|
|
||||||
|
int main(int argc, char* argv[]) {
|
||||||
|
if (argc < 2) {
|
||||||
|
std::cout << "too few arguments" << std::endl;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
signal(SIGINT, signalHandler);
|
||||||
|
signal(SIGTERM, signalHandler);
|
||||||
|
std::string magic_string = argv[1];
|
||||||
|
std::cout << "we got the ndi-stream-name = " << magic_string << std::endl;
|
||||||
|
|
||||||
|
// Not required, but "correct" (see the SDK documentation.
|
||||||
|
if (!NDIlib_initialize()) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
// We are going to create an NDI finder that locates sources on the network.
|
||||||
|
NDIlib_find_instance_t pNDI_find = NDIlib_find_create_v2();
|
||||||
|
if (!pNDI_find) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
// Run for one minute
|
||||||
|
uint32_t no_sources = 0;
|
||||||
|
const NDIlib_source_t* p_sources = nullptr;
|
||||||
|
int stream_offset = -1;
|
||||||
|
while (stream_offset < 0 && !is_terminated) {
|
||||||
|
printf("Looking for sources ...\n");
|
||||||
|
NDIlib_find_wait_for_sources(pNDI_find, 5000/* One second */);
|
||||||
|
// // Wait up till 5 seconds to check for new sources to be added or removed
|
||||||
|
// if (!NDIlib_find_wait_for_sources(pNDI_find, 5000/* 5 seconds */)) {
|
||||||
|
// printf("No change to the sources found.\n");
|
||||||
|
// continue;
|
||||||
|
// }
|
||||||
|
p_sources = NDIlib_find_get_current_sources(pNDI_find, &no_sources);
|
||||||
|
// Display all the sources.
|
||||||
|
printf("Network sources (%u found).\n", no_sources);
|
||||||
|
for (uint32_t i = 0; i < no_sources; i++) {
|
||||||
|
printf("%u. %s\n", i + 1, p_sources[i].p_ndi_name);
|
||||||
|
}
|
||||||
|
for (uint32_t i = 0; i < no_sources; i++) {
|
||||||
|
const auto& the_name = std::string(p_sources[i].p_ndi_name);
|
||||||
|
if (the_name == magic_string) {
|
||||||
|
stream_offset = static_cast<int>(i);
|
||||||
|
std::cout << "selecting the stream [" << the_name << "] with the index = " << i << std::endl;
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
std::cout << "the stream [" << the_name << "] noes not match. index = " << i << std::endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (stream_offset >= 0) {
|
||||||
|
std::cout << "found the stream" << std::endl;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
std::cout << "no such stream" << std::endl;
|
||||||
|
}
|
||||||
|
|
||||||
|
// We now have at least one source, so we create a receiver to look at it.
|
||||||
|
NDIlib_recv_instance_t pNDI_recv = NDIlib_recv_create_v3();
|
||||||
|
if (!pNDI_recv) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Connect to our sources
|
||||||
|
NDIlib_recv_connect(pNDI_recv, p_sources + stream_offset);
|
||||||
|
|
||||||
|
// Destroy the NDI finder. We needed to have access to the pointers to p_sources[0]
|
||||||
|
NDIlib_find_destroy(pNDI_find);
|
||||||
|
|
||||||
|
// Run for one minute
|
||||||
|
using namespace std::chrono;
|
||||||
|
int counter = 0;
|
||||||
|
while (!is_terminated) {
|
||||||
|
counter += 1;
|
||||||
|
// The descriptors
|
||||||
|
NDIlib_video_frame_v2_t video_frame;
|
||||||
|
// NDIlib_audio_frame_v3_t audio_frame;
|
||||||
|
NDIlib_metadata_frame_t metadata_frame;
|
||||||
|
const auto& timeout = 5000;
|
||||||
|
|
||||||
|
const auto& ret = NDIlib_recv_capture_v3(pNDI_recv, &video_frame, nullptr, &metadata_frame, timeout);
|
||||||
|
switch (ret) {
|
||||||
|
case NDIlib_frame_type_none: { // No data
|
||||||
|
printf("No data received.\n");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case NDIlib_frame_type_video: { // Video data
|
||||||
|
printf("%d -- Video data received (%dx%d).\n", counter, video_frame.xres, video_frame.yres);
|
||||||
|
std::cout << "timestand & timecode: " << video_frame.timestamp << " " << video_frame.timecode << std::endl;
|
||||||
|
int len = 0;
|
||||||
|
if (video_frame.p_metadata) {
|
||||||
|
for (int i = 0; ; ++i) {
|
||||||
|
// std::cout << i << std::endl;
|
||||||
|
if (video_frame.p_metadata[i] == 0) {
|
||||||
|
len = i;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
std::cout << "metadata len = " << len << " "
|
||||||
|
<< std::string(video_frame.p_metadata)
|
||||||
|
<< std::endl;
|
||||||
|
} else {
|
||||||
|
std::cout << "video_frame.p_metadata is nullptr" << std::endl;
|
||||||
|
std::cout << "metadata len = " << len << std::endl;
|
||||||
|
}
|
||||||
|
if (!video_frame.p_metadata) {
|
||||||
|
|
||||||
|
}
|
||||||
|
std::cout << "space (video frame)" << std::endl;
|
||||||
|
NDIlib_recv_free_video_v2(pNDI_recv, &video_frame);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
// case NDIlib_frame_type_audio: { // Audio data
|
||||||
|
// printf("Audio data received (%d samples).\n", audio_frame.no_samples);
|
||||||
|
// NDIlib_recv_free_audio_v3(pNDI_recv, &audio_frame);
|
||||||
|
// break;
|
||||||
|
// }
|
||||||
|
case NDIlib_frame_type_metadata: {
|
||||||
|
printf("Metadata received (%d length).\n", metadata_frame.length);
|
||||||
|
std::cout << std::string(metadata_frame.p_data) << std::endl;
|
||||||
|
std::cout << "space (metadata)" << std::endl;
|
||||||
|
NDIlib_recv_free_metadata(pNDI_recv, &metadata_frame);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
printf("other case");
|
||||||
|
std::cout << ret << std::endl;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destroy the receiver
|
||||||
|
NDIlib_recv_destroy(pNDI_recv);
|
||||||
|
|
||||||
|
// Not required, but nice
|
||||||
|
NDIlib_destroy();
|
||||||
|
|
||||||
|
// Finished
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
105
my_ndi_source.cpp
Normal file
105
my_ndi_source.cpp
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
#include <cstdio>
|
||||||
|
#include <cstdlib>
|
||||||
|
#include <cstring>
|
||||||
|
#include <chrono>
|
||||||
|
#include <iostream>
|
||||||
|
#include <csignal>
|
||||||
|
#include <sstream>
|
||||||
|
#include <Processing.NDI.Lib.h>
|
||||||
|
|
||||||
|
using namespace std::chrono;
|
||||||
|
|
||||||
|
#ifdef _WIN32
|
||||||
|
#ifdef _WIN64
|
||||||
|
#pragma comment(lib, "Processing.NDI.Lib.x64.lib")
|
||||||
|
#else // _WIN64
|
||||||
|
#pragma comment(lib, "Processing.NDI.Lib.x86.lib")
|
||||||
|
#endif // _WIN64
|
||||||
|
#endif // _WIN32
|
||||||
|
|
||||||
|
// Shutdown flag: set by signalHandler() and polled by the main send loop.
// FIX: volatile std::sig_atomic_t is the only object type the C/C++ standards
// guarantee can be written from a signal handler; a plain bool is formally UB.
static volatile std::sig_atomic_t is_terminated = false;
/**
 * Process-termination signal handler.
 * Ctrl+C -> SIGINT, SystemD -> SIGTERM; both just request a clean shutdown
 * by raising is_terminated, which the main loop polls.
 *
 * NOTE(review): std::cout is not async-signal-safe; this print is best-effort
 * diagnostics only — consider write(2) or removing it entirely.
 */
void signalHandler(int signum) {
    // FIX: the message previously had no newline/flush, so it could stay
    // invisible in a buffered console until exit.
    std::cout << "Interrupt signal (" << signum << ") received." << std::endl;
    is_terminated = true;
}  // FIX: dropped the stray ';' that followed the closing brace
int main(int argc, char* argv[]) {
|
||||||
|
signal(SIGINT, signalHandler);
|
||||||
|
signal(SIGTERM, signalHandler);
|
||||||
|
|
||||||
|
// Not required, but "correct" (see the SDK documentation.
|
||||||
|
if (!NDIlib_initialize()) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// We create the NDI sender
|
||||||
|
NDIlib_send_instance_t pNDI_send = NDIlib_send_create();
|
||||||
|
if (!pNDI_send) return 0;
|
||||||
|
|
||||||
|
int64_t total_frames_counter = 0;
|
||||||
|
|
||||||
|
while (!is_terminated) {
|
||||||
|
// Get the current time
|
||||||
|
const auto start_send = high_resolution_clock::now();
|
||||||
|
|
||||||
|
// Send 200 frames
|
||||||
|
for (int idx = 200; idx; idx--) {
|
||||||
|
// Fill in the buffer. It is likely that you would do something much smarter than this.
|
||||||
|
|
||||||
|
// We are going to create a 1920x1080 interlaced frame at 29.97Hz.
|
||||||
|
int xres = 1920;
|
||||||
|
int yres = 1920;
|
||||||
|
std::string metadata_string = "<metadata_string " + std::to_string(total_frames_counter) + ">";
|
||||||
|
NDIlib_video_frame_v2_t ndi_video_frame(
|
||||||
|
xres, yres, NDIlib_FourCC_type_BGRX,
|
||||||
|
30000, 1001, 16.0 / 9.0,
|
||||||
|
NDIlib_frame_format_type_progressive,
|
||||||
|
0,
|
||||||
|
(uint8_t*)malloc(xres * yres * 4),
|
||||||
|
0,
|
||||||
|
metadata_string.c_str()
|
||||||
|
);
|
||||||
|
// std::cout << "hi there" << std::endl;
|
||||||
|
// std::cout << "xres = " << ndi_video_frame.xres << std::endl;
|
||||||
|
// std::cout << "yres = " << ndi_video_frame.yres << std::endl;
|
||||||
|
// std::cout << "frame_rate_N = " << ndi_video_frame.frame_rate_N << std::endl;
|
||||||
|
// std::cout << "frame_rate_D = " << ndi_video_frame.frame_rate_D << std::endl;
|
||||||
|
// std::cout << "picture_aspect_ratio = " << ndi_video_frame.picture_aspect_ratio << std::endl;
|
||||||
|
// std::cout << "frame_format_type = " << ndi_video_frame.frame_format_type << std::endl;
|
||||||
|
// std::cout << "timecode = " << ndi_video_frame.timecode << std::endl;
|
||||||
|
// std::cout << "line_stride_in_bytes = " << ndi_video_frame.line_stride_in_bytes << std::endl;
|
||||||
|
// std::cout << "p_metadata = " << ndi_video_frame.p_metadata << std::endl;
|
||||||
|
// std::cout << "timestamp = " << ndi_video_frame.timestamp << std::endl;
|
||||||
|
|
||||||
|
memset((void*)ndi_video_frame.p_data, 0x33, ndi_video_frame.xres * ndi_video_frame.yres * 4);
|
||||||
|
ndi_video_frame.timestamp = total_frames_counter;
|
||||||
|
ndi_video_frame.timecode = total_frames_counter;
|
||||||
|
// memset((void*)ndi_video_frame.p_data, (idx & 1) ? 255 : 0, ndi_video_frame.xres*ndi_video_frame.yres * 4);
|
||||||
|
|
||||||
|
// We now submit the frame. Note that this call will be clocked so that we end up submitting at exactly 29.97fps.
|
||||||
|
NDIlib_send_send_video_v2(pNDI_send, &ndi_video_frame);
|
||||||
|
total_frames_counter += 1;
|
||||||
|
|
||||||
|
free(ndi_video_frame.p_data);
|
||||||
|
// Free the video frame
|
||||||
|
// NDIlib_recv_free_video(pNDI_recv, &video_frame);
|
||||||
|
// free(ndi_video_frame.p_metadata);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Just display something helpful
|
||||||
|
printf("200 frames sent, at %1.2ffps\n", 200.0f / duration_cast<duration<float>>(high_resolution_clock::now() - start_send).count());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destroy the NDI sender
|
||||||
|
NDIlib_send_destroy(pNDI_send);
|
||||||
|
|
||||||
|
// Not required, but nice
|
||||||
|
NDIlib_destroy();
|
||||||
|
|
||||||
|
// Success
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
8
run_gst_loopthrough_capture.cmd
Normal file
8
run_gst_loopthrough_capture.cmd
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
rem Loop-through capture launcher: locates the local NDI test stream and feeds
rem it into gst_get_ndi.exe (ORB-SLAM monocular pipeline).
rem set "PATH=%PATH%;C:\AnacondaGst_x64\Library\bin;C:\Program Files\NDI\NDI 5 SDK\Bin\x64"
rem Bring the GStreamer / NDI tool paths into this shell session.
call ..\PyGstTools\set-env.bat
rem Stream suffix; the full NDI source name is "<hostname> (<STREAM_NAME>)".
set "STREAM_NAME=Ball"
rem Capture this machine's hostname into THIS_HOST.
FOR /F "tokens=* USEBACKQ" %%F IN (`hostname`) DO (SET THIS_HOST=%%F)
ECHO %THIS_HOST%
ECHO "%THIS_HOST% (%STREAM_NAME%)"
rem NOTE(review): the vocabulary and calibration paths below are hard-coded to
rem one user's checkout; parameterize them before sharing this script.
gst_get_ndi.exe C:\Users\ivan\Source\Repos\ORB_SLAM3\Vocabulary\ORBvoc.txt.tar.gz C:\Users\ivan\Source\Repos\ORB-SLAM3forWindows\Examples\Monocular\calib_data\calib1.yaml --ndi-name="%THIS_HOST% (%STREAM_NAME%)"
rem Keep the console open for debugging:
rem cmd.exe
||||||
148
try_1.cpp
Normal file
148
try_1.cpp
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
/*
|
||||||
|
* Based on this example
|
||||||
|
* https://stackoverflow.com/a/26402282/3312367
|
||||||
|
*/
|
||||||
|
#include <gst/gst.h>
|
||||||
|
#include <gst/app/gstappsink.h>
|
||||||
|
//#include <stdlib.h>
|
||||||
|
#include <cstdlib>
|
||||||
|
#include <deque>
|
||||||
|
|
||||||
|
#include "opencv2/opencv.hpp"
|
||||||
|
//using namespace cv;
|
||||||
|
|
||||||
|
// TODO: use synchronized deque
// Frames handed off from the GStreamer streaming thread (new_sample callback)
// to the main display loop. NOTE(review): this is read and written from two
// threads with no lock — the TODO above is a real data race, not a nicety.
std::deque<cv::Mat> frameQueue;
// appsink "new-preroll" callback: invoked for the preroll sample when the
// pipeline transitions through PAUSED. We only announce it here; the sample
// itself is deliberately not pulled.
GstFlowReturn new_preroll(GstAppSink *appsink, gpointer data) {
    g_print ("Got preroll!\n");
    return GST_FLOW_OK;
}
// appsink "new-sample" callback: pulls the sample, wraps the raw buffer in a
// cv::Mat, and hands a deep copy to frameQueue for the display loop.
// Runs on the GStreamer streaming thread.
GstFlowReturn new_sample(GstAppSink *appsink, gpointer data) {
    static int framecount = 0;
    framecount++;

    GstSample *sample = gst_app_sink_pull_sample(appsink);
    GstCaps *caps = gst_sample_get_caps(sample);
    GstBuffer *buffer = gst_sample_get_buffer(sample);

    // ---- Read frame and convert to opencv format ---------------
    GstMapInfo map;
    gst_buffer_map (buffer, &map, GST_MAP_READ);

    // Zero-copy wrapper around the mapped data. NOTE(review): the 320x240
    // size is hard-coded; it must match the negotiated caps (resolve height/
    // width from `caps` instead — confirm against the pipeline description).
    cv::Mat frame(cv::Size(320, 240), CV_8UC3, (char*)map.data, cv::Mat::AUTO_STEP);

    // TODO: synchronize this....
    // FIX: push a deep copy — `frame` merely points into map.data, which
    // becomes invalid after gst_buffer_unmap() below, so queueing the shallow
    // wrapper left dangling pixel data in the queue.
    frameQueue.push_back(frame.clone());

    gst_buffer_unmap(buffer, &map);
    // ------------------------------------------------------------

    // print dot every 30 frames
    if (framecount % 30 == 0) {
        g_print (".");
    }

    // show caps on first frame
    if (framecount == 1) {
        // FIX: gst_caps_to_string() returns an allocated string that was
        // previously leaked.
        gchar *caps_str = gst_caps_to_string(caps);
        g_print ("%s\n", caps_str);
        g_free (caps_str);
    }

    gst_sample_unref (sample);
    return GST_FLOW_OK;
}
// Pipeline bus watch: logs every message type and reports details for
// ERROR messages; EOS and everything else are noted but otherwise ignored.
static gboolean my_bus_callback (GstBus *bus, GstMessage *message, gpointer data) {
    g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message));

    const GstMessageType msg_type = GST_MESSAGE_TYPE (message);
    if (msg_type == GST_MESSAGE_ERROR) {
        GError *err;
        gchar *debug_info;

        gst_message_parse_error (message, &err, &debug_info);
        g_print ("Error: %s\n", err->message);
        g_error_free (err);
        g_free (debug_info);
    } else if (msg_type == GST_MESSAGE_EOS) {
        /* end-of-stream — nothing to do here */
    }
    /* any other message type is left unhandled */

    /* we want to be notified again the next time there is a message on the
     * bus, so return TRUE (FALSE would remove this watch and stop callbacks)
     */
    return TRUE;
}
int main (int argc, char *argv[]) {
|
||||||
|
GError *error = NULL;
|
||||||
|
|
||||||
|
gst_init (&argc, &argv);
|
||||||
|
|
||||||
|
gchar *descr = g_strdup(
|
||||||
|
"videotestsrc pattern=ball ! "
|
||||||
|
"video/x-raw,format=RGB ! "
|
||||||
|
"videoconvert ! "
|
||||||
|
"appsink name=sink sync=true"
|
||||||
|
);
|
||||||
|
GstElement *pipeline = gst_parse_launch (descr, &error);
|
||||||
|
|
||||||
|
if (error != NULL) {
|
||||||
|
g_print ("could not construct pipeline: %s\n", error->message);
|
||||||
|
g_error_free (error);
|
||||||
|
exit (-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* get sink */
|
||||||
|
GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
|
||||||
|
|
||||||
|
gst_app_sink_set_emit_signals((GstAppSink*)sink, true);
|
||||||
|
gst_app_sink_set_drop((GstAppSink*)sink, true);
|
||||||
|
gst_app_sink_set_max_buffers((GstAppSink*)sink, 1);
|
||||||
|
GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample };
|
||||||
|
gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, NULL, NULL);
|
||||||
|
|
||||||
|
GstBus *bus;
|
||||||
|
guint bus_watch_id;
|
||||||
|
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
|
||||||
|
bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL);
|
||||||
|
gst_object_unref (bus);
|
||||||
|
|
||||||
|
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
|
||||||
|
|
||||||
|
cv::namedWindow("edges", 1);
|
||||||
|
bool is_terminated = false;
|
||||||
|
while (!is_terminated) {
|
||||||
|
// g_main_iteration(false);
|
||||||
|
g_main_context_iteration(NULL, false);
|
||||||
|
// TODO: synchronize...
|
||||||
|
if (frameQueue.size() > 0) {
|
||||||
|
// this lags pretty badly even when grabbing frames from webcam
|
||||||
|
cv::Mat frame = frameQueue.front();
|
||||||
|
cv::Mat edges;
|
||||||
|
cvtColor(frame, edges, cv::COLOR_BGR2BGRA);
|
||||||
|
// GaussianBlur(edges, edges, Size(7,7), 1.5, 1.5);
|
||||||
|
// Canny(edges, edges, 0, 30, 3);
|
||||||
|
cv::imshow("edges", edges);
|
||||||
|
cv::waitKey(30);
|
||||||
|
frameQueue.clear();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
|
||||||
|
gst_object_unref (GST_OBJECT (pipeline));
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user