Dot-matrix display finished, add OpenCV

This commit is contained in:
2026-01-20 19:55:56 +08:00
parent 59564fd312
commit bc9f2824ed
367 changed files with 162001 additions and 52 deletions

View File

@@ -0,0 +1,149 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2019 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_CAP_HPP
#define OPENCV_GAPI_STREAMING_CAP_HPP
/**
* YOUR ATTENTION PLEASE!
*
* This is a header-only implementation of cv::VideoCapture-based
* Stream source. It is not built by default with G-API as G-API
* doesn't depend on videoio module.
*
* If you want to use it in your application, please make sure
* videoio is available in your OpenCV package and is linked to your
* application.
*
* Note for developers: please don't put videoio dependency in G-API
* because of this file.
*/
#include <chrono>
#include <map>
#include <opencv2/videoio.hpp>
#include <opencv2/gapi/garg.hpp>
#include <opencv2/gapi/streaming/meta.hpp>
namespace cv {
namespace gapi {
namespace wip {
/**
* @brief OpenCV's VideoCapture-based streaming source.
*
* This class implements the IStreamSource interface.
* Its constructor takes the same parameters as cv::VideoCapture does.
*
* Please make sure that the videoio OpenCV module is available before using
* this in your application (G-API doesn't depend on it directly).
*
* @note Stream sources are passed to G-API via shared pointers, so
* please use gapi::make_src<> to create objects and ptr() to pass a
* GCaptureSource to cv::gin().
*/
class GCaptureSource: public IStreamSource
{
public:
explicit GCaptureSource(int id, const std::map<int, double> &properties = {})
: cap(id) { prep(properties); }
explicit GCaptureSource(const std::string &path,
const std::map<int, double> &properties = {})
: cap(path) { prep(properties); }
void set(int propid, double value) {
cap.set(propid, value);
}
// TODO: Add more constructor overloads to make it
// fully compatible with VideoCapture's interface.
protected:
cv::VideoCapture cap;
cv::Mat first;
bool first_pulled = false;
int64_t counter = 0;
void prep(const std::map<int, double> &properties)
{
for (const auto &it : properties) {
cap.set(it.first, it.second);
}
// Prepare first frame to report its meta to engine
// when needed
GAPI_Assert(first.empty());
cv::Mat tmp;
if (!cap.read(tmp))
{
GAPI_Error("Couldn't grab the very first frame");
}
// NOTE: Some decode/media VideoCapture backends continue
// owning the video buffer under cv::Mat so in order to
// process it safely in a highly concurrent pipeline, clone()
// is the only right way.
first = tmp.clone();
}
virtual bool pull(cv::gapi::wip::Data &data) override
{
if (!first_pulled)
{
GAPI_Assert(!first.empty());
first_pulled = true;
data = first; // no need to clone here since it was cloned already
}
else
{
if (!cap.isOpened()) return false;
cv::Mat frame;
if (!cap.read(frame))
{
// end-of-stream happened
return false;
}
// Same reason to clone as in prep()
data = frame.clone();
}
// Tag data with seq_id/ts
const auto now = std::chrono::system_clock::now();
const auto dur = std::chrono::duration_cast<std::chrono::microseconds>
(now.time_since_epoch());
data.meta[cv::gapi::streaming::meta_tag::timestamp] = int64_t{dur.count()};
data.meta[cv::gapi::streaming::meta_tag::seq_id] = int64_t{counter++};
return true;
}
virtual GMetaArg descr_of() const override
{
GAPI_Assert(!first.empty());
return cv::GMetaArg{cv::descr_of(first)};
}
};
// NB: Overload for use from Python
GAPI_EXPORTS_W cv::Ptr<IStreamSource>
inline make_capture_src(const std::string& path,
const std::map<int, double>& properties = {})
{
return make_src<GCaptureSource>(path, properties);
}
// NB: Overload for use from Python
GAPI_EXPORTS_W cv::Ptr<IStreamSource>
inline make_capture_src(const int id,
const std::map<int, double>& properties = {})
{
return make_src<GCaptureSource>(id, properties);
}
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_CAP_HPP
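
For reference, a minimal usage sketch of GCaptureSource in a streaming pipeline; the video path is a placeholder, and cv::gapi::copy (from the format.hpp header later in this commit) stands in for real graph work:

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/streaming/cap.hpp>
#include <opencv2/gapi/streaming/format.hpp>   // cv::gapi::copy

int main() {
    cv::GMat in;
    cv::GComputation comp(cv::GIn(in), cv::GOut(cv::gapi::copy(in)));

    auto cc = comp.compileStreaming();
    // GCaptureSource wraps cv::VideoCapture; "video.mp4" is a placeholder path.
    cc.setSource(cv::gin(cv::gapi::wip::make_src<cv::gapi::wip::GCaptureSource>("video.mp4")));
    cc.start();

    cv::Mat frame;
    while (cc.pull(cv::gout(frame))) {
        // process `frame` here; pull() returns false on end-of-stream
    }
    return 0;
}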

View File

@@ -0,0 +1,86 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020-2021 Intel Corporation
#ifndef OPENCV_GAPI_GSTREAMING_DESYNC_HPP
#define OPENCV_GAPI_GSTREAMING_DESYNC_HPP
#include <tuple>
#include <opencv2/gapi/util/util.hpp>
#include <opencv2/gapi/gtype_traits.hpp>
#include <opencv2/gapi/garg.hpp>
#include <opencv2/gapi/gcall.hpp>
#include <opencv2/gapi/gkernel.hpp>
namespace cv {
namespace gapi {
namespace streaming {
namespace detail {
struct GDesync {
static const char *id() {
return "org.opencv.streaming.desync";
}
// A universal yield for desync.
// Yields output objects according to the input Types...
// Reuses gkernel machinery.
// FIXME: This function can be generic and declared in gkernel.hpp
// (it is there already, but as a part of GKernelType[M])
template<typename... R, int... IIs>
static std::tuple<R...> yield(cv::GCall &call, cv::detail::Seq<IIs...>) {
return std::make_tuple(cv::detail::Yield<R>::yield(call, IIs)...);
}
};
template<typename G>
G desync(const G &g) {
cv::GKernel k{
GDesync::id() // kernel id
, "" // kernel tag
, [](const GMetaArgs &a, const GArgs &) {return a;} // outMeta callback
, {cv::detail::GTypeTraits<G>::shape} // output Shape
, {cv::detail::GTypeTraits<G>::op_kind} // input data kinds
, {cv::detail::GObtainCtor<G>::get()} // output template ctors
, {cv::detail::GTypeTraits<G>::op_kind} // output data kinds
};
cv::GCall call(std::move(k));
call.pass(g);
return std::get<0>(GDesync::yield<G>(call, cv::detail::MkSeq<1>::type()));
}
} // namespace detail
/**
* @brief Starts a desynchronized branch in the graph.
*
* This operation takes a single G-API data object and returns a
* graph-level "duplicate" of this object.
*
* Operations which use this data object can be desynchronized
* from the rest of the graph.
*
* This operation has no effect when a GComputation is compiled with
* regular cv::GComputation::compile(), since cv::GCompiled objects
* always produce their full output vectors.
*
* This operation only makes sense when a GComputation is compiled in
* streaming mode with cv::GComputation::compileStreaming(). If this
* operation is used and there are desynchronized outputs, the user
* should use a special version of cv::GStreamingCompiled::pull()
* which produces an array of cv::util::optional<> objects.
*
* @note This feature is highly experimental now and is currently
* limited to a single GMat/GFrame argument only.
*/
GAPI_EXPORTS GMat desync(const GMat &g);
GAPI_EXPORTS GFrame desync(const GFrame &f);
} // namespace streaming
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_GSTREAMING_DESYNC_HPP
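
A hedged sketch of how desync is typically wired into a graph; cv::gapi::copy and cv::gapi::blur are stand-ins for the real per-branch work:

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/imgproc.hpp>            // cv::gapi::blur
#include <opencv2/gapi/streaming/desync.hpp>
#include <opencv2/gapi/streaming/format.hpp>   // cv::gapi::copy

cv::GMat in;
cv::GMat fast = cv::gapi::copy(in);                      // main (synchronized) branch
cv::GMat dup  = cv::gapi::streaming::desync(in);         // graph-level duplicate of `in`
cv::GMat slow = cv::gapi::blur(dup, cv::Size(5, 5));     // this branch may lag behind
cv::GComputation comp(cv::GIn(in), cv::GOut(fast, slow));
// With compileStreaming(), the desynchronized output arrives as an optional value:
// it may be absent on a given pull() if that branch hasn't produced a result yet.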

View File

@@ -0,0 +1,94 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020 Intel Corporation
#ifndef OPENCV_GAPI_GSTREAMING_FORMAT_HPP
#define OPENCV_GAPI_GSTREAMING_FORMAT_HPP
#include <opencv2/gapi/gkernel.hpp> // GKernelPackage
namespace cv {
namespace gapi {
namespace streaming {
GAPI_EXPORTS cv::GKernelPackage kernels();
G_API_OP(GBGR, <GMat(GFrame)>, "org.opencv.streaming.BGR")
{
static GMatDesc outMeta(const GFrameDesc& in) { return GMatDesc{CV_8U, 3, in.size}; }
};
G_API_OP(GY, <GMat(GFrame)>, "org.opencv.streaming.Y") {
static GMatDesc outMeta(const GFrameDesc& frameDesc) {
return GMatDesc { CV_8U, 1, frameDesc.size , false };
}
};
G_API_OP(GUV, <GMat(GFrame)>, "org.opencv.streaming.UV") {
static GMatDesc outMeta(const GFrameDesc& frameDesc) {
return GMatDesc { CV_8U, 2, cv::Size(frameDesc.size.width / 2, frameDesc.size.height / 2),
false };
}
};
/** @brief Gets the BGR plane from the input frame
@note Function textual ID is "org.opencv.streaming.BGR"
@param in Input frame
@return Image in BGR format
*/
GAPI_EXPORTS cv::GMat BGR(const cv::GFrame& in);
/** @brief Extracts Y plane from media frame.
Output image is 8-bit 1-channel image of @ref CV_8UC1.
@note Function textual ID is "org.opencv.streaming.Y"
@param frame input media frame.
*/
GAPI_EXPORTS GMat Y(const cv::GFrame& frame);
/** @brief Extracts UV plane from media frame.
Output image is 8-bit 2-channel image of @ref CV_8UC2.
@note Function textual ID is "org.opencv.streaming.UV"
@param frame input media frame.
*/
GAPI_EXPORTS GMat UV(const cv::GFrame& frame);
} // namespace streaming
//! @addtogroup gapi_transform
//! @{
/** @brief Makes a copy of the input image. Note that this copy may not be real
(no actual data copied). Use this function to maintain graph contracts,
e.g. when the graph's input needs to be passed directly to its output, as in streaming mode.
@note Function textual ID is "org.opencv.streaming.copy"
@param in Input image
@return Copy of the input
*/
GAPI_EXPORTS_W GMat copy(const GMat& in);
/** @brief Makes a copy of the input frame. Note that this copy may not be real
(no actual data copied). Use this function to maintain graph contracts,
e.g. when the graph's input needs to be passed directly to its output, as in streaming mode.
@note Function textual ID is "org.opencv.streaming.copy"
@param in Input frame
@return Copy of the input
*/
GAPI_EXPORTS GFrame copy(const GFrame& in);
//! @} gapi_transform
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_GSTREAMING_FORMAT_HPP
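
A short sketch of the frame accessors declared above; the ops only describe the graph, and the actual extraction/conversion is done by the backend when the graph runs on a media-frame source:

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/streaming/format.hpp>

cv::GFrame frame;
cv::GMat bgr = cv::gapi::streaming::BGR(frame);  // full-size 3-channel BGR image
cv::GMat y   = cv::gapi::streaming::Y(frame);    // luma plane, CV_8UC1
cv::GMat uv  = cv::gapi::streaming::UV(frame);   // interleaved chroma, CV_8UC2, half size
cv::GComputation comp(cv::GIn(frame), cv::GOut(bgr, y, uv));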

View File

@@ -0,0 +1,59 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2021 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_GSTREAMER_GSTREAMERPIPELINE_HPP
#define OPENCV_GAPI_STREAMING_GSTREAMER_GSTREAMERPIPELINE_HPP
#include <opencv2/gapi/streaming/gstreamer/gstreamersource.hpp>
#include <opencv2/gapi/own/exports.hpp>
#include <string>
#include <unordered_map>
#include <memory>
namespace cv {
namespace gapi {
namespace wip {
namespace gst {
class GAPI_EXPORTS_W GStreamerPipeline
{
public:
class Priv;
GAPI_WRAP explicit GStreamerPipeline(const std::string& pipeline);
IStreamSource::Ptr getStreamingSource(const std::string& appsinkName,
const GStreamerSource::OutputType outputType =
GStreamerSource::OutputType::MAT);
virtual ~GStreamerPipeline();
protected:
explicit GStreamerPipeline(std::unique_ptr<Priv> priv);
std::unique_ptr<Priv> m_priv;
};
} // namespace gst
using GStreamerPipeline = gst::GStreamerPipeline;
// NB: Function for use from Python
// FIXME: a separate function is created due to the absence of wrappers for `shared_ptr<> `
// Ideally, the `GStreamerPipeline::getStreamingSource()` method would be wrapped as is
GAPI_EXPORTS_W cv::Ptr<IStreamSource>
inline get_streaming_source(cv::Ptr<GStreamerPipeline>& pipeline,
const std::string& appsinkName,
const GStreamerSource::OutputType outputType
= GStreamerSource::OutputType::MAT)
{
return pipeline->getStreamingSource(appsinkName, outputType);
}
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_GSTREAMER_GSTREAMERPIPELINE_HPP
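
A sketch of the multi-appsink case GStreamerPipeline is meant for; the pipeline string and the appsink names ("sink_mat", "sink_frame") are placeholders:

#include <opencv2/gapi/streaming/gstreamer/gstreamerpipeline.hpp>

cv::gapi::wip::GStreamerPipeline pipeline(
    "videotestsrc ! tee name=t "
    "t. ! queue ! videoconvert ! video/x-raw,format=NV12 ! appsink name=sink_mat "
    "t. ! queue ! videoconvert ! video/x-raw,format=NV12 ! appsink name=sink_frame");

// Each appsink becomes its own IStreamSource:
auto mat_src   = pipeline.getStreamingSource("sink_mat");   // cv::Mat output (default)
auto frame_src = pipeline.getStreamingSource(
    "sink_frame", cv::gapi::wip::GStreamerSource::OutputType::FRAME);  // cv::MediaFrame output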

View File

@@ -0,0 +1,97 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2021 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_GSTREAMER_GSTREAMERSOURCE_HPP
#define OPENCV_GAPI_STREAMING_GSTREAMER_GSTREAMERSOURCE_HPP
#include <opencv2/gapi/streaming/source.hpp>
#include <opencv2/gapi/garg.hpp>
#include <memory>
namespace cv {
namespace gapi {
namespace wip {
namespace gst {
/**
* @brief OpenCV's GStreamer streaming source.
* Streams cv::Mat / cv::MediaFrame objects from the passed GStreamer pipeline.
*
* This class implements the IStreamSource interface.
*
* To create a GStreamerSource instance you need to pass 'pipeline' and, optionally, 'outputType'
* arguments to the constructor.
* 'pipeline' is a textual description of a GStreamer pipeline.
* Almost any custom pipeline that can be successfully run via gst-launch is supported.
* The only two limitations are:
* - there should be __one__ appsink element in the pipeline to pass data to the OpenCV app.
* The pipeline can actually contain many sink elements, but it must have one and only one
* appsink among them.
*
* - the data passed to the appsink should be video frames in NV12 or GRAY8 format.
*
* 'outputType' selects the type of output data to produce: 'cv::MediaFrame' or 'cv::Mat'.
* Pass 'GStreamerSource::OutputType::FRAME' to produce 'cv::MediaFrame's and,
* correspondingly, 'GStreamerSource::OutputType::MAT' to produce 'cv::Mat's.
* Please note that in the latter case the output 'cv::Mat' is in BGR format; an internal conversion
* from the NV12 / GRAY8 GStreamer data takes place.
* The default value for 'outputType' is 'GStreamerSource::OutputType::MAT'.
*
* @note Stream sources are passed to G-API via shared pointers, so please use gapi::make_src<>
* to create objects and ptr() to pass a GStreamerSource to cv::gin().
*
* @note You need to build OpenCV with GStreamer support to use this class.
*/
class GStreamerPipelineFacade;
class GAPI_EXPORTS GStreamerSource : public IStreamSource
{
public:
class Priv;
// Indicates what type of data should be produced by GStreamerSource: cv::MediaFrame or cv::Mat
enum class OutputType {
FRAME,
MAT
};
GStreamerSource(const std::string& pipeline,
const GStreamerSource::OutputType outputType =
GStreamerSource::OutputType::MAT);
GStreamerSource(std::shared_ptr<GStreamerPipelineFacade> pipeline,
const std::string& appsinkName,
const GStreamerSource::OutputType outputType =
GStreamerSource::OutputType::MAT);
bool pull(cv::gapi::wip::Data& data) override;
GMetaArg descr_of() const override;
~GStreamerSource() override;
protected:
explicit GStreamerSource(std::unique_ptr<Priv> priv);
std::unique_ptr<Priv> m_priv;
};
} // namespace gst
using GStreamerSource = gst::GStreamerSource;
// NB: Overload for use from Python
GAPI_EXPORTS_W cv::Ptr<IStreamSource>
inline make_gst_src(const std::string& pipeline,
const GStreamerSource::OutputType outputType =
GStreamerSource::OutputType::MAT)
{
return make_src<GStreamerSource>(pipeline, outputType);
}
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_GSTREAMER_GSTREAMERSOURCE_HPP
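
And the single-appsink case via the make_gst_src helper; the pipeline string below is only an example of the expected shape (one appsink, NV12 caps):

#include <opencv2/gapi/streaming/gstreamer/gstreamersource.hpp>

auto src = cv::gapi::wip::make_gst_src(
    "filesrc location=video.mp4 ! decodebin ! videoconvert ! "
    "video/x-raw,format=NV12 ! appsink");
// The result is an IStreamSource::Ptr, passed to a streaming graph as usual:
// cc.setSource(cv::gin(src));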

View File

@@ -0,0 +1,80 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020 Intel Corporation
#ifndef OPENCV_GAPI_GSTREAMING_META_HPP
#define OPENCV_GAPI_GSTREAMING_META_HPP
#include <opencv2/gapi/gopaque.hpp>
#include <opencv2/gapi/gcall.hpp>
#include <opencv2/gapi/gkernel.hpp>
#include <opencv2/gapi/gtype_traits.hpp>
namespace cv {
namespace gapi {
namespace streaming {
// FIXME: the name is debatable
namespace meta_tag {
static constexpr const char * timestamp = "org.opencv.gapi.meta.timestamp";
static constexpr const char * seq_id = "org.opencv.gapi.meta.seq_id";
} // namespace meta_tag
namespace detail {
struct GMeta {
static const char *id() {
return "org.opencv.streaming.meta";
}
// A universal yield for meta(), same as in GDesync
template<typename... R, int... IIs>
static std::tuple<R...> yield(cv::GCall &call, cv::detail::Seq<IIs...>) {
return std::make_tuple(cv::detail::Yield<R>::yield(call, IIs)...);
}
// Also a universal outMeta stub here
static GMetaArgs getOutMeta(const GMetaArgs &args, const GArgs &) {
return args;
}
};
} // namespace detail
template<typename T, typename G>
cv::GOpaque<T> meta(G g, const std::string &tag) {
using O = cv::GOpaque<T>;
cv::GKernel k{
detail::GMeta::id() // kernel id
, tag // kernel tag. Use meta tag here
, &detail::GMeta::getOutMeta // outMeta callback
, {cv::detail::GTypeTraits<O>::shape} // output Shape
, {cv::detail::GTypeTraits<G>::op_kind} // input data kinds
, {cv::detail::GObtainCtor<O>::get()} // output template ctors
, {cv::detail::GTypeTraits<O>::op_kind} // output data kind
};
cv::GCall call(std::move(k));
call.pass(g);
return std::get<0>(detail::GMeta::yield<O>(call, cv::detail::MkSeq<1>::type()));
}
template<typename G>
cv::GOpaque<int64_t> timestamp(G g) {
return meta<int64_t>(g, meta_tag::timestamp);
}
template<typename G>
cv::GOpaque<int64_t> seq_id(G g) {
return meta<int64_t>(g, meta_tag::seq_id);
}
template<typename G>
cv::GOpaque<int64_t> seqNo(G g) {
// Old name, compatibility only
return seq_id(g);
}
} // namespace streaming
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_GSTREAMING_META_HPP
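
A sketch showing how the per-frame metadata attached by sources (see GCaptureSource::pull() above) is surfaced as graph outputs; cv::gapi::copy is a placeholder for real work:

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/streaming/meta.hpp>
#include <opencv2/gapi/streaming/format.hpp>   // cv::gapi::copy

cv::GMat in;
cv::GMat out = cv::gapi::copy(in);
cv::GOpaque<int64_t> ts  = cv::gapi::streaming::timestamp(out);  // capture timestamp, microseconds
cv::GOpaque<int64_t> seq = cv::gapi::streaming::seq_id(out);     // frame sequence number
cv::GComputation comp(cv::GIn(in), cv::GOut(out, ts, seq));
// In streaming mode every pull() then yields the frame together with its tags.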

View File

@@ -0,0 +1,76 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2022 Intel Corporation
#ifndef GAPI_STREAMING_ONEVPL_ACCEL_TYPES_HPP
#define GAPI_STREAMING_ONEVPL_ACCEL_TYPES_HPP
#include <limits>
#include <string>
#include "opencv2/gapi/own/exports.hpp" // GAPI_EXPORTS
namespace cv {
namespace gapi {
namespace wip {
namespace onevpl {
enum class AccelType: uint8_t {
HOST,
DX11,
VAAPI,
LAST_VALUE = std::numeric_limits<uint8_t>::max()
};
GAPI_EXPORTS const char* to_cstring(AccelType type);
struct IDeviceSelector;
struct GAPI_EXPORTS Device {
friend struct IDeviceSelector;
using Ptr = void*;
~Device();
const std::string& get_name() const;
Ptr get_ptr() const;
AccelType get_type() const;
private:
Device(Ptr device_ptr, const std::string& device_name,
AccelType device_type);
std::string name;
Ptr ptr;
AccelType type;
};
struct GAPI_EXPORTS Context {
friend struct IDeviceSelector;
using Ptr = void*;
~Context();
Ptr get_ptr() const;
AccelType get_type() const;
private:
Context(Ptr ctx_ptr, AccelType ctx_type);
Ptr ptr;
AccelType type;
};
GAPI_EXPORTS Device create_host_device();
GAPI_EXPORTS Context create_host_context();
GAPI_EXPORTS Device create_dx11_device(Device::Ptr device_ptr,
const std::string& device_name);
GAPI_EXPORTS Context create_dx11_context(Context::Ptr ctx_ptr);
GAPI_EXPORTS Device create_vaapi_device(Device::Ptr device_ptr,
const std::string& device_name);
GAPI_EXPORTS Context create_vaapi_context(Context::Ptr ctx_ptr);
} // namespace onevpl
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // GAPI_STREAMING_ONEVPL_ACCEL_TYPES_HPP

View File

@@ -0,0 +1,209 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2021 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_ONEVPL_CFG_PARAMS_HPP
#define OPENCV_GAPI_STREAMING_ONEVPL_CFG_PARAMS_HPP
#include <map>
#include <memory>
#include <string>
#include <opencv2/gapi/streaming/source.hpp>
#include <opencv2/gapi/util/variant.hpp>
namespace cv {
namespace gapi {
namespace wip {
namespace onevpl {
/**
* @brief Public class used for creating onevpl::GSource instances.
*
* Class members available through the @ref CfgParam::get_name() and @ref CfgParam::get_value() methods are used by
* the inner logic of onevpl::GSource to create or find a particular oneVPL implementation
* (software/hardware, specific API version, etc.).
*
* @note Because oneVPL may provide several implementations satisfying multiple (or a single) @ref CfgParam
* criteria, it is possible to configure `preferred` parameters. This kind of CfgParam is created
* using the `is_major = false` argument of the @ref CfgParam::create method and is not used for creating a particular oneVPL implementation.
* Instead, such params fill out a "score table" used to select the preferable implementation from the available list:
* the implementation satisfying most of these optional params is chosen.
* If no optional CfgParams are present, the first available oneVPL implementation is applied.
* Please refer to https://spec.oneapi.io/versions/latest/elements/oneVPL/source/API_ref/VPL_disp_api_func.html?highlight=mfxcreateconfig#mfxsetconfigfilterproperty
* for details on oneVPL configuration. In that schema `mfxU8 *name` corresponds to @ref CfgParam::get_name() and
* `mfxVariant value` to @ref CfgParam::get_value().
*/
struct GAPI_EXPORTS CfgParam {
using name_t = std::string;
using value_t = cv::util::variant<uint8_t, int8_t,
uint16_t, int16_t,
uint32_t, int32_t,
uint64_t, int64_t,
float_t,
double_t,
void*,
std::string>;
/**
* @brief frames_pool_size_name
*
* Special configuration parameter name for onevpl::GSource:
*
* @note frames_pool_size_name allows allocating a surface pool of appropriate size to keep
* decoded frames ready in accelerator memory before
* they are consumed by the onevpl::GSource::pull operation. If you see
* a lot of WARNINGs about a lack of free surfaces, it is time to increase
* frames_pool_size_name, but be aware of the amount of free accelerator memory.
* If not set, the MFX implementation uses the
* mfxFrameAllocRequest::NumFrameSuggested behavior
*
*/
static constexpr const char *frames_pool_size_name() { return "frames_pool_size"; }
static CfgParam create_frames_pool_size(size_t value);
/**
* @brief acceleration_mode_name
*
* Special configuration parameter name for onevpl::GSource:
*
* @note acceleration_mode_name allows activating hardware acceleration &
* device memory management.
* Supported values:
* - MFX_ACCEL_MODE_VIA_D3D11 activates DX11 acceleration and produces
* MediaFrames with data allocated in DX11 device memory
*
* If not set, the MFX implementation uses the default acceleration behavior:
* all decoding operations use default GPU resources, but MediaFrame
* data is allocated in host RAM
*
*/
static constexpr const char *acceleration_mode_name() { return "mfxImplDescription.AccelerationMode"; }
static CfgParam create_acceleration_mode(uint32_t value);
static CfgParam create_acceleration_mode(const char* value);
/**
* @brief decoder_id_name
*
* Special configuration parameter name for onevpl::GSource:
*
* @note decoder_id_name allows specifying the VPL decoder type, which MUST be present
* in case of RAW video input data and MUST NOT be present as a CfgParam if the video
* stream is encapsulated into a container (*.mp4, *.mkv and so on). In the latter case
* onevpl::GSource will determine it automatically
* Supported values:
* - MFX_CODEC_AVC
* - MFX_CODEC_HEVC
* - MFX_CODEC_MPEG2
* - MFX_CODEC_VC1
* - MFX_CODEC_CAPTURE
* - MFX_CODEC_VP9
* - MFX_CODEC_AV1
*
*/
static constexpr const char *decoder_id_name() { return "mfxImplDescription.mfxDecoderDescription.decoder.CodecID"; }
static CfgParam create_decoder_id(uint32_t value);
static CfgParam create_decoder_id(const char* value);
static constexpr const char *implementation_name() { return "mfxImplDescription.Impl"; }
static CfgParam create_implementation(uint32_t value);
static CfgParam create_implementation(const char* value);
static constexpr const char *vpp_frames_pool_size_name() { return "vpp_frames_pool_size"; }
static CfgParam create_vpp_frames_pool_size(size_t value);
static constexpr const char *vpp_in_width_name() { return "vpp.In.Width"; }
static CfgParam create_vpp_in_width(uint16_t value);
static constexpr const char *vpp_in_height_name() { return "vpp.In.Height"; }
static CfgParam create_vpp_in_height(uint16_t value);
static constexpr const char *vpp_in_crop_x_name() { return "vpp.In.CropX"; }
static CfgParam create_vpp_in_crop_x(uint16_t value);
static constexpr const char *vpp_in_crop_y_name() { return "vpp.In.CropY"; }
static CfgParam create_vpp_in_crop_y(uint16_t value);
static constexpr const char *vpp_in_crop_w_name() { return "vpp.In.CropW"; }
static CfgParam create_vpp_in_crop_w(uint16_t value);
static constexpr const char *vpp_in_crop_h_name() { return "vpp.In.CropH"; }
static CfgParam create_vpp_in_crop_h(uint16_t value);
static constexpr const char *vpp_out_fourcc_name() { return "vpp.Out.FourCC"; }
static CfgParam create_vpp_out_fourcc(uint32_t value);
static constexpr const char *vpp_out_chroma_format_name() { return "vpp.Out.ChromaFormat"; }
static CfgParam create_vpp_out_chroma_format(uint16_t value);
static constexpr const char *vpp_out_width_name() { return "vpp.Out.Width"; }
static CfgParam create_vpp_out_width(uint16_t value);
static constexpr const char *vpp_out_height_name() { return "vpp.Out.Height"; }
static CfgParam create_vpp_out_height(uint16_t value);
static constexpr const char *vpp_out_crop_x_name() { return "vpp.Out.CropX"; }
static CfgParam create_vpp_out_crop_x(uint16_t value);
static constexpr const char *vpp_out_crop_y_name() { return "vpp.Out.CropY"; }
static CfgParam create_vpp_out_crop_y(uint16_t value);
static constexpr const char *vpp_out_crop_w_name() { return "vpp.Out.CropW"; }
static CfgParam create_vpp_out_crop_w(uint16_t value);
static constexpr const char *vpp_out_crop_h_name() { return "vpp.Out.CropH"; }
static CfgParam create_vpp_out_crop_h(uint16_t value);
static constexpr const char *vpp_out_pic_struct_name() { return "vpp.Out.PicStruct"; }
static CfgParam create_vpp_out_pic_struct(uint16_t value);
static constexpr const char *vpp_out_framerate_n_name() { return "vpp.Out.FrameRateExtN"; }
static CfgParam create_vpp_out_framerate_n(uint32_t value);
static constexpr const char *vpp_out_framerate_d_name() { return "vpp.Out.FrameRateExtD"; }
static CfgParam create_vpp_out_framerate_d(uint32_t value);
/**
* Create a generic onevpl::GSource configuration parameter.
*
*@param name name of the parameter.
*@param value value of the parameter.
*@param is_major TRUE if the parameter MUST be provided by the oneVPL inner implementation, FALSE if it is optional (used to resolve among multiple available implementations).
*
*/
template<typename ValueType>
static CfgParam create(const std::string& name, ValueType&& value, bool is_major = true) {
CfgParam param(name, CfgParam::value_t(std::forward<ValueType>(value)), is_major);
return param;
}
struct Priv;
const name_t& get_name() const;
const value_t& get_value() const;
bool is_major() const;
std::string to_string() const;
bool operator==(const CfgParam& rhs) const;
bool operator< (const CfgParam& rhs) const;
bool operator!=(const CfgParam& rhs) const;
CfgParam& operator=(const CfgParam& src);
CfgParam& operator=(CfgParam&& src);
CfgParam(const CfgParam& src);
CfgParam(CfgParam&& src);
~CfgParam();
private:
CfgParam(const std::string& param_name, value_t&& param_value, bool is_major_param);
std::shared_ptr<Priv> m_priv;
};
} //namespace onevpl
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_ONEVPL_CFG_PARAMS_HPP
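
A hedged configuration sketch: the codec/acceleration string values follow the oneVPL dispatcher property naming used above, and the pool size passed via the generic create() is an arbitrary example of an optional (non-major) parameter:

#include <vector>
#include <opencv2/gapi/streaming/onevpl/cfg_params.hpp>

using cv::gapi::wip::onevpl::CfgParam;

std::vector<CfgParam> params{
    // Major params: must be satisfied by the selected oneVPL implementation.
    CfgParam::create_acceleration_mode("MFX_ACCEL_MODE_VIA_D3D11"),
    CfgParam::create_decoder_id("MFX_CODEC_HEVC"),
    // Optional ("preferred") param: only contributes to the implementation score table.
    CfgParam::create(CfgParam::frames_pool_size_name(), uint64_t{25}, false /*is_major*/)
};
// `params` is then passed to the onevpl::GSource constructors declared further below.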

View File

@@ -0,0 +1,105 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2021 Intel Corporation
#ifndef GAPI_STREAMING_ONEVPL_ONEVPL_DATA_PROVIDER_INTERFACE_HPP
#define GAPI_STREAMING_ONEVPL_ONEVPL_DATA_PROVIDER_INTERFACE_HPP
#include <exception>
#include <memory>
#include <string>
#include <opencv2/gapi/own/exports.hpp> // GAPI_EXPORTS
namespace cv {
namespace gapi {
namespace wip {
namespace onevpl {
struct GAPI_EXPORTS DataProviderException : public std::exception {
DataProviderException(const std::string& descr);
DataProviderException(std::string&& descr);
virtual ~DataProviderException() = default;
virtual const char* what() const noexcept override;
private:
std::string reason;
};
struct GAPI_EXPORTS DataProviderSystemErrorException final : public DataProviderException {
DataProviderSystemErrorException(int error_code, const std::string& description = std::string());
~DataProviderSystemErrorException() = default;
};
struct GAPI_EXPORTS DataProviderUnsupportedException final : public DataProviderException {
DataProviderUnsupportedException(const std::string& description);
~DataProviderUnsupportedException() = default;
};
struct GAPI_EXPORTS DataProviderImplementationException : public DataProviderException {
DataProviderImplementationException(const std::string& description);
~DataProviderImplementationException() = default;
};
/**
* @brief Public interface that allows customizing the extraction of video stream data
* used by onevpl::GSource instead of reading the stream from a file (the default).
*
* The constructor of an interface implementation MUST provide consistency and create a fully operable object.
* If an error happens, the implementation MUST throw a `DataProviderException`-kind exception.
*
* @note An interface implementation MUST manage the stream and other constructed resources by itself to avoid any kind of leak.
* For a simple interface implementation example, please see `StreamDataProvider` in `tests/streaming/gapi_streaming_tests.cpp`
*/
struct GAPI_EXPORTS IDataProvider {
using Ptr = std::shared_ptr<IDataProvider>;
using mfx_codec_id_type = uint32_t;
/**
* NB: a forward declaration of mfxBitstream is supposed to be here,
* but according to the current oneVPL implementation it is impossible to
* forward-declare the untagged struct mfxBitstream.
*
* IDataProvider only makes sense when HAVE_VPL is ON, and to keep the IDataProvider
* interface API/ABI compliant between the core library and the user application layer
* we introduce the wrapper mfx_bitstream, which inherits mfxBitstream in the private
* G-API code section, and forward-declare the wrapper mfx_bitstream here
*/
struct mfx_bitstream;
virtual ~IDataProvider() = default;
/**
* The function is used by onevpl::GSource to extract the codec id from the data
*
*/
virtual mfx_codec_id_type get_mfx_codec_id() const = 0;
/**
* The function is used by onevpl::GSource to extract the binary data stream from an @ref IDataProvider
* implementation.
*
* It MUST throw `DataProviderException`-kind exceptions in failure cases.
* It MUST return MFX_ERR_MORE_DATA on EOF, which is considered a non-failure case.
*
* @param in_out_bitsream the input-output reference to the MFX bitstream buffer, which MUST be empty at the first request
* to allow the implementation to allocate it by itself and return it back. Subsequent invocations of `fetch_bitstream_data`
* MUST use the previously used in_out_bitsream to avoid skipping the rest of the frames which haven't been consumed yet
* @return true if data was fetched, false on EOF; throws an exception on error
*/
virtual bool fetch_bitstream_data(std::shared_ptr<mfx_bitstream> &in_out_bitsream) = 0;
/**
* The function is used by onevpl::GSource to check whether more binary data is available.
*
* It MUST return TRUE in case of EOF and MUST NOT throw exceptions.
*
* @return boolean value indicating the end of the stream
*/
virtual bool empty() const = 0;
};
} // namespace onevpl
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // GAPI_STREAMING_ONEVPL_ONEVPL_DATA_PROVIDER_INTERFACE_HPP

View File

@@ -0,0 +1,29 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2022 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_ONEVPL_UTILS_HPP
#define OPENCV_GAPI_STREAMING_ONEVPL_UTILS_HPP
#include <opencv2/gapi/own/exports.hpp> // GAPI_EXPORTS
#include <opencv2/gapi/streaming/onevpl/cfg_params.hpp>
#include <opencv2/gapi/streaming/onevpl/device_selector_interface.hpp>
namespace cv {
namespace gapi {
namespace wip {
namespace onevpl {
/**
* @brief Provides default device selector based on config.
*/
GAPI_EXPORTS std::shared_ptr<IDeviceSelector> getDefaultDeviceSelector(const std::vector<CfgParam>& cfg_params);
} // namespace onevpl
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_ONEVPL_UTILS_HPP

View File

@@ -0,0 +1,61 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2021 Intel Corporation
#ifndef GAPI_STREAMING_ONEVPL_DEVICE_SELECTOR_INTERFACE_HPP
#define GAPI_STREAMING_ONEVPL_DEVICE_SELECTOR_INTERFACE_HPP
#include <limits>
#include <map>
#include <string>
#include <vector>
#include <opencv2/gapi/streaming/onevpl/accel_types.hpp>
namespace cv {
namespace gapi {
namespace wip {
namespace onevpl {
struct GAPI_EXPORTS IDeviceSelector {
using Ptr = std::shared_ptr<IDeviceSelector>;
struct GAPI_EXPORTS Score {
friend struct IDeviceSelector;
using Type = int16_t;
static constexpr Type MaxActivePriority = std::numeric_limits<Type>::max();
static constexpr Type MinActivePriority = 0;
static constexpr Type MaxPassivePriority = MinActivePriority - 1;
static constexpr Type MinPassivePriority = std::numeric_limits<Type>::min();
Score(Type val);
~Score();
operator Type () const;
Type get() const;
friend bool operator< (Score lhs, Score rhs) {
return lhs.get() < rhs.get();
}
private:
Type value;
};
using DeviceScoreTable = std::map<Score, Device>;
using DeviceContexts = std::vector<Context>;
virtual ~IDeviceSelector();
virtual DeviceScoreTable select_devices() const = 0;
virtual DeviceContexts select_context() = 0;
protected:
template<typename Entity, typename ...Args>
static Entity create(Args &&...args) {
return Entity(std::forward<Args>(args)...);
}
};
} // namespace onevpl
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // GAPI_STREAMING_ONEVPL_DEVICE_SELECTOR_INTERFACE_HPP
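
A minimal custom selector sketch, assuming the host-device factories from accel_types.hpp above; the class name is illustrative only:

#include <opencv2/gapi/streaming/onevpl/accel_types.hpp>
#include <opencv2/gapi/streaming/onevpl/device_selector_interface.hpp>

using namespace cv::gapi::wip::onevpl;

// Always reports the host (CPU) device and context with the highest priority.
struct HostOnlySelector final : public IDeviceSelector {
    DeviceScoreTable select_devices() const override {
        return {{Score::MaxActivePriority, create_host_device()}};
    }
    DeviceContexts select_context() override {
        return {create_host_context()};
    }
};
// An instance is passed to onevpl::GSource via its std::shared_ptr<IDeviceSelector> constructor.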

View File

@@ -0,0 +1,94 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2021 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_ONEVPL_ONEVPL_SOURCE_HPP
#define OPENCV_GAPI_STREAMING_ONEVPL_ONEVPL_SOURCE_HPP
#include <opencv2/gapi/garg.hpp>
#include <opencv2/gapi/streaming/meta.hpp>
#include <opencv2/gapi/streaming/source.hpp>
#include <opencv2/gapi/streaming/onevpl/cfg_params.hpp>
#include <opencv2/gapi/streaming/onevpl/data_provider_interface.hpp>
#include <opencv2/gapi/streaming/onevpl/device_selector_interface.hpp>
namespace cv {
namespace gapi {
namespace wip {
namespace onevpl {
using CfgParams = std::vector<CfgParam>;
/**
* @brief G-API streaming source based on the oneVPL implementation.
*
* This class implements the IStreamSource interface.
* Its constructor takes a source file path (in the usual way) or an @ref onevpl::IDataProvider
* interface implementation (for non-file-based sources). It also allows passing through
* oneVPL configuration parameters using several @ref onevpl::CfgParam objects.
*
* @note Stream sources are passed to G-API via shared pointers, so
* please use gapi::make_onevpl_src<> to create objects and ptr() to pass a
* GSource to cv::gin().
*/
class GAPI_EXPORTS GSource : public IStreamSource
{
public:
struct Priv;
GSource(const std::string& filePath,
const CfgParams& cfg_params = CfgParams{});
GSource(const std::string& filePath,
const CfgParams& cfg_params,
const std::string& device_id,
void* accel_device_ptr,
void* accel_ctx_ptr);
GSource(const std::string& filePath,
const CfgParams& cfg_params,
const Device &device, const Context &ctx);
GSource(const std::string& filePath,
const CfgParams& cfg_params,
std::shared_ptr<IDeviceSelector> selector);
GSource(std::shared_ptr<IDataProvider> source,
const CfgParams& cfg_params = CfgParams{});
GSource(std::shared_ptr<IDataProvider> source,
const CfgParams& cfg_params,
const std::string& device_id,
void* accel_device_ptr,
void* accel_ctx_ptr);
GSource(std::shared_ptr<IDataProvider> source,
const CfgParams& cfg_params,
std::shared_ptr<IDeviceSelector> selector);
~GSource() override;
bool pull(cv::gapi::wip::Data& data) override;
GMetaArg descr_of() const override;
private:
explicit GSource(std::unique_ptr<Priv>&& impl);
std::unique_ptr<Priv> m_priv;
};
} // namespace onevpl
using GVPLSource = onevpl::GSource;
template<class... Args>
GAPI_EXPORTS_W cv::Ptr<IStreamSource> inline make_onevpl_src(Args&&... args)
{
return make_src<onevpl::GSource>(std::forward<Args>(args)...);
}
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_ONEVPL_ONEVPL_SOURCE_HPP
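
A usage sketch under the assumption that this header is installed as <opencv2/gapi/streaming/onevpl/source.hpp>; the file names are placeholders:

#include <opencv2/gapi/streaming/onevpl/source.hpp>

// Simplest case: let oneVPL pick an implementation and detect the codec
// from the container (*.mp4 here), with default (host) memory.
auto src = cv::gapi::wip::make_onevpl_src("video.mp4");

// With explicit configuration (see cfg_params.hpp above):
// auto src2 = cv::gapi::wip::make_onevpl_src("raw_stream.h265", params);
// cc.setSource(cv::gin(src));   // cc: a GStreamingCompiled, as with any IStreamSource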

View File

@@ -0,0 +1,67 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2023 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_QUEUE_SOURCE_HPP
#define OPENCV_GAPI_STREAMING_QUEUE_SOURCE_HPP
#include <memory> // shared_ptr
#include <type_traits> // is_base_of
#include <opencv2/gapi/garg.hpp> // GRunArgs
#include <opencv2/gapi/gmetaarg.hpp> // GMetaArg + all descr_of
#include <opencv2/gapi/streaming/source.hpp> // IStreamSource
namespace cv {
namespace gapi {
namespace wip {
struct Data; // fwd-declare to avoid circular header dependencies
class GAPI_EXPORTS QueueSourceBase: public cv::gapi::wip::IStreamSource {
class Priv;
std::shared_ptr<Priv> m_priv;
// FIXME: Need to understand how it works with IStreamSource's shared_from_this
// Can we avoid having too many shared_ptrs here?
public:
explicit QueueSourceBase(const cv::GMetaArg &m);
void push(Data &&data);
virtual bool pull(Data &data) override;
virtual void halt() override;
virtual GMetaArg descr_of() const override;
virtual ~QueueSourceBase() = default;
};
/**
* @brief Queue-based streaming pipeline source: the application pushes data objects of type T and the pipeline pulls them as they arrive.
*
*/
template<class T>
class QueueSource final: public QueueSourceBase
{
public:
using Meta = decltype(cv::descr_of(T{}));
explicit QueueSource(Meta m) : QueueSourceBase(GMetaArg{m}) {
}
void push(T t) {
QueueSourceBase::push(Data{t});
}
};
class GAPI_EXPORTS QueueInput {
std::vector<std::shared_ptr<QueueSourceBase> > m_sources;
public:
explicit QueueInput(const cv::GMetaArgs &args);
void push(cv::GRunArgs &&ins);
operator cv::GRunArgs();
};
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_QUEUE_SOURCE_HPP
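
A push-driven usage sketch; the 640x480 BGR meta and the variable names are placeholders:

#include <memory>
#include <opencv2/gapi.hpp>
#include <opencv2/gapi/streaming/queue_source.hpp>

// The source must know its output meta up-front (it cannot peek a first frame).
auto qsrc = std::make_shared<cv::gapi::wip::QueueSource<cv::Mat>>(
    cv::GMatDesc{CV_8U, 3, cv::Size(640, 480)});

// cc.setSource(cv::gin(qsrc->ptr()));  // cc: a GStreamingCompiled
// cc.start();
// qsrc->push(frame);                   // feed cv::Mat frames as they become available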

View File

@@ -0,0 +1,67 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2019 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_SOURCE_HPP
#define OPENCV_GAPI_STREAMING_SOURCE_HPP
#include <memory> // shared_ptr
#include <type_traits> // is_base_of
#include <opencv2/gapi/gmetaarg.hpp> // GMetaArg
namespace cv {
namespace gapi {
namespace wip {
struct Data; // forward-declaration of Data to avoid circular dependencies
/**
* @brief Abstract streaming pipeline source.
*
* Implement this interface if you want to customize the way data is
* streamed into GStreamingCompiled.
*
* Objects implementing this interface can be passed to
* GStreamingCompiled using setSource() with cv::gin(). Regular
* compiled graphs (GCompiled) don't support input objects of this
* type.
*
* Default cv::VideoCapture-based implementation is available, see
* cv::gapi::wip::GCaptureSource.
*
* @note Stream sources are passed to G-API via shared pointers, so
* please use ptr() when passing an IStreamSource implementation to
* cv::gin().
*/
class IStreamSource: public std::enable_shared_from_this<IStreamSource>
{
public:
using Ptr = std::shared_ptr<IStreamSource>;
Ptr ptr() { return shared_from_this(); }
virtual bool pull(Data &data) = 0;
virtual GMetaArg descr_of() const = 0;
virtual void halt() {
// Do nothing by default to maintain compatibility with the existing sources...
// In fact, it needs to be overridden in the child classes to get the intended behavior
// FIXME: Make it mandatory in OpenCV 5.0
};
virtual ~IStreamSource() = default;
};
template<class T, class... Args>
IStreamSource::Ptr inline make_src(Args&&... args)
{
static_assert(std::is_base_of<IStreamSource, T>::value,
"T must implement the cv::gapi::IStreamSource interface!");
auto src_ptr = std::make_shared<T>(std::forward<Args>(args)...);
return src_ptr->ptr();
}
} // namespace wip
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_SOURCE_HPP
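
For illustration, a toy IStreamSource implementation (the class and member names are hypothetical); it follows the same clone-and-report pattern as GCaptureSource above:

#include <opencv2/gapi/garg.hpp>               // cv::gapi::wip::Data
#include <opencv2/gapi/streaming/source.hpp>

// Emits the same frame a fixed number of times, then signals end-of-stream.
class ConstSource final : public cv::gapi::wip::IStreamSource {
    cv::Mat m_frame;
    int     m_left;
public:
    ConstSource(const cv::Mat &frame, int count) : m_frame(frame), m_left(count) {}
    bool pull(cv::gapi::wip::Data &data) override {
        if (m_left-- <= 0) return false;       // end-of-stream
        data = m_frame.clone();                // clone for safe concurrent processing
        return true;
    }
    cv::GMetaArg descr_of() const override {
        return cv::GMetaArg{cv::descr_of(m_frame)};
    }
};
// auto src = cv::gapi::wip::make_src<ConstSource>(some_mat, 100);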

View File

@@ -0,0 +1,30 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2021 Intel Corporation
#ifndef OPENCV_GAPI_STREAMING_SYNC_HPP
#define OPENCV_GAPI_STREAMING_SYNC_HPP
namespace cv {
namespace gapi {
namespace streaming {
enum class sync_policy {
dont_sync,
drop
};
} // namespace streaming
} // namespace gapi
namespace detail {
template<> struct CompileArgTag<gapi::streaming::sync_policy> {
static const char* tag() { return "gapi.streaming.sync_policy"; }
};
} // namespace detail
} // namespace cv
#endif // OPENCV_GAPI_STREAMING_SYNC_HPP
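
sync_policy is a compile argument (note the CompileArgTag specialization above); a minimal sketch of passing it when compiling a streaming graph with several live inputs, assuming `comp` is an existing cv::GComputation:

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/gcommon.hpp>            // cv::compile_args
#include <opencv2/gapi/streaming/sync.hpp>

// Ask G-API to drop frames so that multiple live inputs stay synchronized;
// with sync_policy::dont_sync every input is passed through as-is.
// auto cc = comp.compileStreaming(
//     cv::compile_args(cv::gapi::streaming::sync_policy::drop));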