Commit 43b2bb2c authored by Alexander Alekhin

dnn: plugin support for OpenVINO

Parent: ca7f9641
......@@ -1094,6 +1094,18 @@ macro(ocv_list_filterout lst regex)
endforeach()
endmacro()
# Remove from the named list every element that matches any of the given
# regular expressions (macro: mutates the list in the caller's scope).
# Usage: ocv_list_filterout_ex(list_name regex1 regex2 ...)
macro(ocv_list_filterout_ex lst)
  foreach(__filterout_regex ${ARGN})
    # collect matches first, then remove them in one call
    set(__filterout_hits "")
    foreach(__filterout_item ${${lst}})
      if(__filterout_item MATCHES "${__filterout_regex}")
        list(APPEND __filterout_hits "${__filterout_item}")
      endif()
    endforeach()
    if(__filterout_hits)
      list(REMOVE_ITEM ${lst} ${__filterout_hits})
    endif()
    unset(__filterout_hits)
  endforeach()
endmacro()
# filter matching elements from the list
macro(ocv_list_filter lst regex)
set(dst ${ARGN})
......
......@@ -62,7 +62,7 @@ CV_EXPORTS void glob_relative(const cv::String& directory, const cv::String& pat
CV_EXPORTS bool createDirectory(const cv::String& path);
CV_EXPORTS bool createDirectories(const cv::String& path);
#ifdef __OPENCV_BUILD
#if defined(__OPENCV_BUILD) || defined(BUILD_PLUGIN)
// TODO
//CV_EXPORTS cv::String getTempDirectory();
......
......@@ -13,18 +13,22 @@ ocv_add_dispatched_file_force_all("int8layers/layers_common" AVX2 AVX512_SKX)
ocv_add_module(dnn opencv_core opencv_imgproc WRAP python java objc js)
include(${CMAKE_CURRENT_LIST_DIR}/cmake/plugin.cmake)
ocv_option(OPENCV_DNN_OPENCL "Build with OpenCL support" HAVE_OPENCL AND NOT APPLE)
if(OPENCV_DNN_OPENCL AND HAVE_OPENCL)
add_definitions(-DCV_OCL4DNN=1)
ocv_target_compile_definitions(${the_module} PRIVATE "CV_OCL4DNN=1")
endif()
if(WITH_WEBNN AND HAVE_WEBNN)
add_definitions(-DHAVE_WEBNN=1)
ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_WEBNN=1")
endif()
if(HAVE_TIMVX)
add_definitions(-DHAVE_TIMVX=1)
ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_TIMVX=1")
endif()
ocv_option(OPENCV_DNN_CUDA "Build with CUDA support"
......@@ -35,7 +39,7 @@ ocv_option(OPENCV_DNN_CUDA "Build with CUDA support"
if(OPENCV_DNN_CUDA)
if(HAVE_CUDA AND HAVE_CUBLAS AND HAVE_CUDNN)
add_definitions(-DCV_CUDA4DNN=1)
ocv_target_compile_definitions(${the_module} PRIVATE "CV_CUDA4DNN=1")
else()
if(NOT HAVE_CUDA)
message(SEND_ERROR "DNN: CUDA backend requires CUDA Toolkit. Please resolve dependency or disable OPENCV_DNN_CUDA=OFF")
......@@ -47,12 +51,15 @@ if(OPENCV_DNN_CUDA)
endif()
endif()
ocv_cmake_hook_append(INIT_MODULE_SOURCES_opencv_dnn "${CMAKE_CURRENT_LIST_DIR}/cmake/hooks/INIT_MODULE_SOURCES_opencv_dnn.cmake")
if(HAVE_TENGINE)
add_definitions(-DHAVE_TENGINE=1)
ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_TENGINE=1")
endif()
if(MSVC)
add_definitions( -D_CRT_SECURE_NO_WARNINGS=1 )
ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4244 /wd4267 /wd4018 /wd4355 /wd4800 /wd4251 /wd4996 /wd4146
......@@ -87,10 +94,10 @@ if(ANDROID)
endif()
if(NOT BUILD_PROTOBUF)
add_definitions(-DOPENCV_DNN_EXTERNAL_PROTOBUF=1)
ocv_target_compile_definitions(${the_module} PRIVATE "OPENCV_DNN_EXTERNAL_PROTOBUF=1")
endif()
add_definitions(-DHAVE_PROTOBUF=1)
ocv_target_compile_definitions(${the_module} PRIVATE "HAVE_PROTOBUF=1")
#suppress warnings in autogenerated caffe.pb.* files
ocv_warnings_disable(CMAKE_CXX_FLAGS
......@@ -175,12 +182,34 @@ endif()
set(dnn_runtime_libs "")
file(GLOB_RECURSE dnn_srcs
"${CMAKE_CURRENT_LIST_DIR}/src/*.cpp"
)
file(GLOB_RECURSE dnn_int_hdrs
"${CMAKE_CURRENT_LIST_DIR}/src/*.hpp"
"${CMAKE_CURRENT_LIST_DIR}/src/*.h"
)
set(dnn_plugin_srcs ${dnn_srcs} ${dnn_int_hdrs})
# Exclude sources that must stay in the main module only.
# Fix: '/src/dnn_utils.cpp$' was duplicated inside the first pattern.
ocv_list_filterout_ex(dnn_plugin_srcs
    "/src/dnn.cpp$|/src/dnn_utils.cpp$|/src/dnn_read.cpp$|/src/registry.cpp$|/src/backend.cpp$"
    # importers
    "/src/(caffe|darknet|onnx|tensorflow|torch)/"
    # executors
    "/src/(cuda|cuda4dnn|ocl4dnn|vkcom|webnn)/"
)
ocv_option(OPENCV_DNN_OPENVINO "Build with OpenVINO support (2021.4+)" (TARGET ocv.3rdparty.openvino))
if(TARGET ocv.3rdparty.openvino AND OPENCV_DNN_OPENVINO)
if(NOT HAVE_OPENVINO AND NOT HAVE_NGRAPH)
message(FATAL_ERROR "DNN: Inference Engine is not supported without enabled 'nGraph'. Check build configuration.")
endif()
list(APPEND dnn_runtime_libs ocv.3rdparty.openvino)
if("openvino" IN_LIST DNN_PLUGIN_LIST OR DNN_PLUGIN_LIST STREQUAL "all")
# plugin doesn't support PCH, separate directory scope is necessary
# opencv_world requires absolute path
add_subdirectory("${CMAKE_CURRENT_LIST_DIR}/misc/plugin/openvino" "${CMAKE_CURRENT_BINARY_DIR}/dnn_plugin_openvino")
elseif(NOT OPENCV_DNN_BUILTIN_BACKEND)
list(APPEND dnn_runtime_libs ocv.3rdparty.openvino)
endif()
endif()
ocv_glob_module_sources(${sources_options} SOURCES ${fw_srcs} ${webnn_srcs})
......@@ -205,7 +234,7 @@ if(BUILD_PERF_TESTS)
)
find_package(Caffe QUIET)
if (Caffe_FOUND)
add_definitions(-DHAVE_CAFFE=1)
ocv_target_compile_definitions(opencv_perf_dnn PRIVATE "HAVE_CAFFE=1")
ocv_target_link_libraries(opencv_perf_dnn caffe)
endif()
elseif(OPENCV_DNN_PERF_CLCAFFE
......@@ -213,8 +242,25 @@ if(BUILD_PERF_TESTS)
)
find_package(Caffe QUIET)
if (Caffe_FOUND)
add_definitions(-DHAVE_CLCAFFE=1)
ocv_target_compile_definitions(opencv_perf_dnn PRIVATE "HAVE_CLCAFFE=1")
ocv_target_link_libraries(opencv_perf_dnn caffe)
endif()
endif()
endif()
if(DNN_ENABLE_PLUGINS)
ocv_target_compile_definitions(${the_module} PRIVATE ENABLE_PLUGINS)
if(TARGET opencv_test_dnn)
ocv_target_compile_definitions(opencv_test_dnn PRIVATE ENABLE_PLUGINS)
endif()
if(OPENCV_DEBUG_POSTFIX)
ocv_append_source_file_compile_definitions("${CMAKE_CURRENT_LIST_DIR}/src/backend.cpp" "DEBUG_POSTFIX=${OPENCV_DEBUG_POSTFIX}")
endif()
endif()
ocv_option(OPENCV_TEST_DNN_OPENVINO "Build test with OpenVINO code" (TARGET ocv.3rdparty.openvino))
if(TARGET ocv.3rdparty.openvino AND OPENCV_TEST_DNN_OPENVINO)
if(TARGET opencv_test_dnn)
ocv_target_link_libraries(opencv_test_dnn ocv.3rdparty.openvino)
endif()
endif()
if(PROJECT_NAME STREQUAL "OpenCV")
set(ENABLE_PLUGINS_DEFAULT ON)
if(EMSCRIPTEN OR IOS OR WINRT)
set(ENABLE_PLUGINS_DEFAULT OFF)
endif()
set(DNN_PLUGIN_LIST "" CACHE STRING "List of DNN backends to be compiled as plugins (openvino, etc or special value 'all')")
set(DNN_ENABLE_PLUGINS "${ENABLE_PLUGINS_DEFAULT}" CACHE BOOL "Allow building and using of DNN plugins")
mark_as_advanced(DNN_PLUGIN_LIST DNN_ENABLE_PLUGINS)
string(REPLACE "," ";" DNN_PLUGIN_LIST "${DNN_PLUGIN_LIST}") # support comma-separated list (,) too
string(TOLOWER "${DNN_PLUGIN_LIST}" DNN_PLUGIN_LIST)
if(NOT DNN_ENABLE_PLUGINS)
if(DNN_PLUGIN_LIST)
message(WARNING "DNN: plugins are disabled through DNN_ENABLE_PLUGINS, so DNN_PLUGIN_LIST='${DNN_PLUGIN_LIST}' is ignored")
set(DNN_PLUGIN_LIST "")
endif()
else()
# Make virtual plugins target
if(NOT TARGET opencv_dnn_plugins)
add_custom_target(opencv_dnn_plugins ALL)
endif()
endif()
endif()
#
# Detect available dependencies
#
# OpenVINO - detected by main CMake scripts (shared with G-API)
# ocv_create_builtin_dnn_plugin(<name> <target> [<src> ...])
#
# Builds one DNN backend plugin (a MODULE library) from module sources:
#   name   - plugin library name (e.g. opencv_dnn_openvino)
#   target - backend dependency target (e.g. ocv.3rdparty.openvino); must exist
#   ARGN   - source files, either relative to <module>/src/ or absolute paths
# Fix: 'opencv_dnn' was listed twice in the module link loop (duplicate
# link/include processing); the list is now deduplicated.
function(ocv_create_builtin_dnn_plugin name target)
  ocv_debug_message("ocv_create_builtin_dnn_plugin(${ARGV})")

  if(NOT TARGET ${target})
    message(FATAL_ERROR "${target} does not exist!")
  endif()
  if(NOT OpenCV_SOURCE_DIR)
    message(FATAL_ERROR "OpenCV_SOURCE_DIR must be set to build the plugin!")
  endif()

  message(STATUS "DNN: add builtin plugin '${name}'")

  set(ENABLE_PRECOMPILED_HEADERS OFF)  # no support for PCH in plugins, conflicts with module's source files

  # TODO: update CPU optimizations scripts to support plugins
  # NOTE: directory-scoped commands are intentional here - this function is
  # invoked from a dedicated plugin directory scope (add_subdirectory)
  add_definitions(-D__OPENCV_BUILD=1)
  add_definitions(-DBUILD_PLUGIN=1)
  include_directories("${OPENCV_MODULE_opencv_dnn_BINARY_DIR}")  # Cannot open include file: 'layers/layers_common.simd_declarations.hpp'

  # resolve sources: relative to the module's src/ directory, or absolute
  foreach(src ${ARGN})
    if(EXISTS "${CMAKE_CURRENT_LIST_DIR}/src/${src}")
      list(APPEND sources "${CMAKE_CURRENT_LIST_DIR}/src/${src}")
    elseif(IS_ABSOLUTE "${src}")
      list(APPEND sources "${src}")
    else()
      message(FATAL_ERROR "Unknown source: ${src}")
    endif()
  endforeach()
  if(OPENCV_MODULE_${the_module}_SOURCES_DISPATCHED)
    list(APPEND sources ${OPENCV_MODULE_${the_module}_SOURCES_DISPATCHED})
  endif()

  set(__${name}_DEPS_EXT "")
  ocv_compiler_optimization_process_sources(sources __${name}_DEPS_EXT ${name})

  add_library(${name} MODULE ${sources})
  target_include_directories(${name} PRIVATE "${CMAKE_CURRENT_BINARY_DIR}")
  target_link_libraries(${name} PRIVATE ${target} ${__${name}_DEPS_EXT})
  target_link_libraries(${name} PRIVATE ${__plugin_libs})

  # link against the required OpenCV modules (deduplicated list)
  foreach(mod
      opencv_dnn
      opencv_core
      opencv_imgproc
  )
    ocv_target_link_libraries(${name} LINK_PRIVATE ${mod})
    ocv_target_include_directories(${name} "${OPENCV_MODULE_${mod}_LOCATION}/include")
  endforeach()

  # plugin file naming: Windows embeds version/arch suffixes into the name
  if(WIN32)
    set(OPENCV_PLUGIN_VERSION "${OPENCV_DLLVERSION}" CACHE STRING "")
    if(CMAKE_CXX_SIZEOF_DATA_PTR EQUAL 8)
      set(OPENCV_PLUGIN_ARCH "_64" CACHE STRING "")
    else()
      set(OPENCV_PLUGIN_ARCH "" CACHE STRING "")
    endif()
  else()
    set(OPENCV_PLUGIN_VERSION "" CACHE STRING "")
    set(OPENCV_PLUGIN_ARCH "" CACHE STRING "")
  endif()

  set_target_properties(${name} PROPERTIES
    CXX_STANDARD 11
    CXX_VISIBILITY_PRESET hidden
    DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
    OUTPUT_NAME "${name}${OPENCV_PLUGIN_VERSION}${OPENCV_PLUGIN_ARCH}"
  )

  # install next to executables on Windows (DLL search path), into lib dir elsewhere
  if(WIN32)
    set_target_properties(${name} PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH})
    install(TARGETS ${name} OPTIONAL LIBRARY DESTINATION ${OPENCV_BIN_INSTALL_PATH} COMPONENT plugins)
  else()
    install(TARGETS ${name} OPTIONAL LIBRARY DESTINATION ${OPENCV_LIB_INSTALL_PATH} COMPONENT plugins)
  endif()

  add_dependencies(opencv_dnn_plugins ${name})
endfunction()
......@@ -81,9 +81,11 @@ CV__DNN_INLINE_NS_BEGIN
DNN_BACKEND_CUDA,
DNN_BACKEND_WEBNN,
DNN_BACKEND_TIMVX,
#ifdef __OPENCV_BUILD
#if defined(__OPENCV_BUILD) || defined(BUILD_PLUGIN)
#if !defined(OPENCV_BINDING_PARSER)
DNN_BACKEND_INFERENCE_ENGINE_NGRAPH = 1000000, // internal - use DNN_BACKEND_INFERENCE_ENGINE + setInferenceEngineBackendType()
DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019, // internal - use DNN_BACKEND_INFERENCE_ENGINE + setInferenceEngineBackendType()
#endif
#endif
};
......
#include_directories("${OPENCV_MODULE_opencv_dnn_BINARY_DIR}") # Cannot open include file: 'layers/layers_common.simd_declarations.hpp'
ocv_create_builtin_dnn_plugin(opencv_dnn_openvino ocv.3rdparty.openvino ${dnn_plugin_srcs})
......@@ -113,10 +113,10 @@ class dnn_test(NewOpenCVTests):
proto = self.find_dnn_file('dnn/layers/layer_convolution.prototxt')
model = self.find_dnn_file('dnn/layers/layer_convolution.caffemodel')
net = cv.dnn.readNet(proto, model)
net.setPreferableBackend(backend)
net.setPreferableTarget(target)
inp = np.random.standard_normal([1, 2, 10, 11]).astype(np.float32)
try:
net.setPreferableBackend(backend)
net.setPreferableTarget(target)
inp = np.random.standard_normal([1, 2, 10, 11]).astype(np.float32)
net.setInput(inp)
net.forward()
except BaseException as e:
......
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "precomp.hpp"
#include "backend.hpp"
#include <opencv2/core/private.hpp>
#include <opencv2/core/utils/configuration.private.hpp>
#include <opencv2/core/utils/logger.defines.hpp>
#ifdef NDEBUG
#define CV_LOG_STRIP_LEVEL CV_LOG_LEVEL_DEBUG + 1
#else
#define CV_LOG_STRIP_LEVEL CV_LOG_LEVEL_VERBOSE + 1
#endif
#include <opencv2/core/utils/logger.hpp>
#include "factory.hpp"
#include "plugin_api.hpp"
#include "plugin_wrapper.impl.hpp"
namespace cv { namespace dnn_backend {
// Out-of-line (non-inline) definition of the virtual destructor: anchors the
// NetworkBackend vtable in this translation unit.
NetworkBackend::~NetworkBackend()
{
// nothing
}
}} // namespace cv::dnn_backend
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef OPENCV_DNN_BACKEND_HPP
#define OPENCV_DNN_BACKEND_HPP
#include <memory>
#include <map>
namespace cv { namespace dnn_backend {
using namespace cv::dnn;
/** @brief Abstract interface implemented by DNN backend plugins (e.g. the OpenVINO plugin). */
class CV_EXPORTS NetworkBackend
{
public:
virtual ~NetworkBackend();
/** @brief Replaces the implementation of @p net with this backend's implementation. */
virtual void switchBackend(Net& net) = 0;
/**
@brief Reads a network from model/config files through this backend.
@param loaderID use empty "" for auto
@param model see cv::dnn::readNetwork
@param config see cv::dnn::readNetwork
*/
virtual Net readNetwork(const std::string& loaderID, const std::string& model, const std::string& config) = 0;
/** @overload Reads a network from in-memory buffers (model/config and weights). */
virtual Net readNetwork(
const std::string& loaderID,
const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize,
const uchar* bufferWeightsPtr, size_t bufferWeightsSize
) = 0;
// TODO: target as string + configuration
/** @brief Returns true if the given DNN target is usable through this backend. */
virtual bool checkTarget(Target target) = 0;
};
} // namespace dnn_backend
} // namespace cv
#endif // OPENCV_DNN_BACKEND_HPP
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef OPENCV_DNN_FACTORY_HPP
#define OPENCV_DNN_FACTORY_HPP
#include "backend.hpp"
namespace cv { namespace dnn_backend {
/** @brief Factory interface producing NetworkBackend instances.
 *  Implemented by PluginDNNBackendFactory (see plugin_wrapper). */
class IDNNBackendFactory
{
public:
virtual ~IDNNBackendFactory() {}
/// Creates (or returns a shared) backend instance; see createPluginDNNBackendFactory().
virtual std::shared_ptr<cv::dnn_backend::NetworkBackend> createNetworkBackend() const = 0;
};
//
// PluginDNNBackendFactory is implemented in plugin_wrapper
//
std::shared_ptr<IDNNBackendFactory> createPluginDNNBackendFactory(const std::string& baseName);
/// @brief Returns createPluginDNNBackendFactory()->createNetworkBackend()
cv::dnn_backend::NetworkBackend& createPluginDNNNetworkBackend(const std::string& baseName);
}} // namespace
#endif // OPENCV_DNN_FACTORY_HPP
......@@ -42,7 +42,9 @@
#include "precomp.hpp"
#include <opencv2/dnn/layer.details.hpp>
#if !defined(BUILD_PLUGIN)
#include <google/protobuf/stubs/common.h>
#endif
namespace cv {
namespace dnn {
......@@ -58,6 +60,7 @@ Mutex& getInitializationMutex()
// force initialization (single-threaded environment)
Mutex* __initialization_mutex_initializer = &getInitializationMutex();
#if !defined(BUILD_PLUGIN)
namespace {
using namespace google::protobuf;
class ProtobufShutdown {
......@@ -71,12 +74,15 @@ public:
}
};
} // namespace
#endif
void initializeLayerFactory()
{
CV_TRACE_FUNCTION();
#if !defined(BUILD_PLUGIN)
static ProtobufShutdown protobufShutdown; CV_UNUSED(protobufShutdown);
#endif
CV_DNN_REGISTER_LAYER_CLASS(Slice, SliceLayer);
CV_DNN_REGISTER_LAYER_CLASS(Split, SplitLayer);
......
......@@ -4,8 +4,6 @@
#include "precomp.hpp"
#include <opencv2/imgproc.hpp>
#include <opencv2/dnn/layer_reg.private.hpp> // getLayerFactoryImpl
......
......@@ -96,22 +96,6 @@ struct LayerData
int flag;
Ptr<Layer> getLayerInstance()
{
CV_TRACE_FUNCTION();
CV_TRACE_ARG_VALUE(type, "type", type.c_str());
if (layerInstance)
return layerInstance;
layerInstance = LayerFactory::createLayerInstance(type, params);
if (!layerInstance)
{
CV_Error(Error::StsError, "Can't create layer \"" + name + "\" of type \"" + type + "\"");
}
return layerInstance;
}
void resetAllocation()
{
......
......@@ -222,14 +222,14 @@ void Net::Impl::setUpNet(const std::vector<LayerPin>& blobsToKeep_)
Ptr<Layer> Net::Impl::getLayer(int layerId) const
{
LayerData& ld = getLayerData(layerId);
return ld.getLayerInstance();
return getLayerInstance(ld);
}
Ptr<Layer> Net::Impl::getLayer(const LayerId& layerId) const
{
LayerData& ld = getLayerData(layerId);
return ld.getLayerInstance();
return getLayerInstance(ld);
}
......@@ -321,7 +321,7 @@ int Net::Impl::resolvePinOutputName(LayerData& ld, const String& outName) const
{
if (outName.empty())
return 0;
return ld.getLayerInstance()->outputNameToIndex(outName);
return getLayerInstance(ld)->outputNameToIndex(outName);
}
......@@ -522,7 +522,7 @@ void Net::Impl::allocateLayer(int lid, const LayersShapesMap& layersShapes)
for (int i = 0; i < ld.internalBlobsWrappers.size(); ++i)
ld.internalBlobsWrappers[i] = wrap(ld.internals[i]);
Ptr<Layer> layerPtr = ld.getLayerInstance();
Ptr<Layer> layerPtr = getLayerInstance(ld);
{
std::vector<Mat> inps(ld.inputBlobs.size());
for (int i = 0; i < ld.inputBlobs.size(); ++i)
......@@ -1148,7 +1148,7 @@ void Net::Impl::getLayerShapesRecursively(int id, LayersShapesMap& inOutShapes)
ShapesVec& os = layerShapes.out;
ShapesVec& ints = layerShapes.internal;
int requiredOutputs = layerData.requiredOutputs.size();
Ptr<Layer> l = layerData.getLayerInstance();
const Ptr<Layer>& l = getLayerInstance(layerData);
CV_Assert(l);
bool layerSupportInPlace = false;
try
......@@ -1302,7 +1302,7 @@ void Net::Impl::updateLayersShapes()
const MatShape& shape = layersShapes[inputLayerId].out[inputPin.oid];
layerShapes.in.push_back(shape);
}
layerData.getLayerInstance()->updateMemoryShapes(layerShapes.in);
getLayerInstance(layerData)->updateMemoryShapes(layerShapes.in);
}
CV_LOG_DEBUG(NULL, "Layer " << layerId << ": " << toString(layerShapes.in, "input shapes"));
CV_LOG_IF_DEBUG(NULL, !layerShapes.out.empty(), "Layer " << layerId << ": " << toString(layerShapes.out, "output shapes"));
......@@ -1451,7 +1451,7 @@ void Net::Impl::setInput(InputArray blob, const String& name, double scalefactor
Mat Net::Impl::getParam(int layer, int numParam) const
{
LayerData& ld = getLayerData(layer);
std::vector<Mat>& layerBlobs = ld.getLayerInstance()->blobs;
std::vector<Mat>& layerBlobs = getLayerInstance(ld)->blobs;
CV_Assert(numParam < (int)layerBlobs.size());
return layerBlobs[numParam];
}
......@@ -1460,7 +1460,8 @@ void Net::Impl::setParam(int layer, int numParam, const Mat& blob)
{
LayerData& ld = getLayerData(layer);
std::vector<Mat>& layerBlobs = ld.getLayerInstance()->blobs;
// FIXIT we should not modify "execution" instance
std::vector<Mat>& layerBlobs = getLayerInstance(ld)->blobs;
CV_Assert(numParam < (int)layerBlobs.size());
// we don't make strong checks, use this function carefully
layerBlobs[numParam] = blob;
......@@ -1927,7 +1928,7 @@ int64 Net::Impl::getFLOPS(const std::vector<MatShape>& netInputShapes) /*const*/
for (int i = 0; i < ids.size(); i++)
{
flops += layers[ids[i]].getLayerInstance()->getFLOPS(inShapes[i], outShapes[i]);
flops += getLayerInstance(layers[ids[i]])->getFLOPS(inShapes[i], outShapes[i]);
}
return flops;
......@@ -1944,7 +1945,7 @@ int64 Net::Impl::getFLOPS(
LayerShapes shapes;
getLayerShapes(netInputShapes, layerId, shapes);
return const_cast<LayerData&>(layer->second).getLayerInstance()->getFLOPS(shapes.in, shapes.out);
return getLayerInstance(const_cast<LayerData&>(layer->second))->getFLOPS(shapes.in, shapes.out);
}
......
......@@ -54,7 +54,6 @@ struct Net::Impl : public detail::NetImplBase
int preferableBackend;
int preferableTarget;
String halideConfigFile;
// bool skipInfEngineInit;
bool hasDynamicShapes;
// Map host data to backend specific wrapper.
std::map<void*, Ptr<BackendWrapper>> backendWrappers;
......@@ -84,6 +83,32 @@ struct Net::Impl : public detail::NetImplBase
void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>());
// Creates a layer object for 'ld' via the LayerFactory registry.
// Virtual so derived Net implementations (e.g. the OpenVINO backend impl)
// can consult their own backend-specific registry first.
virtual Ptr<Layer> createLayerInstance(const LayerData& ld) const
{
return LayerFactory::createLayerInstance(ld.type, const_cast<LayerParams&>(ld.params));
}
// Returns the (lazily created, cached) layer instance for 'ld'.
// Creation order: this impl's createLayerInstance(), then the upstream
// impl's registry via basePtr_ (used when a plugin impl wraps a base impl).
// Throws StsError if no registry can create the layer type.
Ptr<Layer> getLayerInstance(LayerData& ld) const
{
CV_TRACE_FUNCTION();
CV_TRACE_ARG_VALUE(type, "type", ld.type.c_str());
if (ld.layerInstance)
return ld.layerInstance;
ld.layerInstance = createLayerInstance(ld);
if (!ld.layerInstance && basePtr_)
{
ld.layerInstance = basePtr_->createLayerInstance(ld);
CV_LOG_IF_DEBUG(NULL, ld.layerInstance, "Created layer \"" + ld.name + "\" of type \"" + ld.type + "\" from upstream layers registry");
}
if (!ld.layerInstance)
{
CV_Error(Error::StsError, "Can't create layer \"" + ld.name + "\" of type \"" + ld.type + "\"");
}
return ld.layerInstance;
}
Ptr<Layer> getLayer(int layerId) const;
Ptr<Layer> getLayer(const LayerId& layerId) const;
......
......@@ -7,6 +7,9 @@
#include "net_impl.hpp"
#include "legacy_backend.hpp"
#include "backend.hpp"
#include "factory.hpp"
namespace cv {
namespace dnn {
CV__DNN_INLINE_NS_BEGIN
......@@ -166,14 +169,22 @@ void Net::Impl::setPreferableBackend(Net& net, int backendId)
if (preferableBackend != backendId)
{
preferableBackend = backendId;
clear();
#ifdef HAVE_INF_ENGINE
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
{
#if defined(HAVE_INF_ENGINE)
switchToOpenVINOBackend(net);
}
#elif defined(ENABLE_PLUGINS)
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
networkBackend.switchBackend(net);
#else
CV_Error(Error::StsNotImplemented, "OpenVINO backend is not available in the current OpenCV build");
#endif
}
else
{
preferableBackend = backendId;
}
}
}
......
......@@ -634,7 +634,7 @@ void Net::Impl::fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
pin = inp_i_data->inputBlobsId[0];
inp_i_data = &layers[pin.lid];
}
conv_layer = conv_layer && (inp_i_data->getLayerInstance()->type == "Convolution");
conv_layer = conv_layer && (getLayerInstance(*inp_i_data)->type == "Convolution");
}
if (!conv_layer)
continue;
......
......@@ -11,6 +11,9 @@
#include "net_impl.hpp"
#include "backend.hpp"
#include "factory.hpp"
namespace cv {
namespace dnn {
CV__DNN_INLINE_NS_BEGIN
......@@ -80,11 +83,12 @@ public:
if (backendId == DNN_BACKEND_INFERENCE_ENGINE || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
return; // no-op
if (!basePtr_)
CV_Error(Error::StsError, "DNN: Can't switch backend of network created by OpenVINO");
CV_Error(Error::StsError, "DNN: Can't switch backend of network created by OpenVINO native loader");
Ptr<Net::Impl>& impl_ptr_ref = accessor::DnnNetAccessor::getImplPtrRef(net);
impl_ptr_ref = basePtr_;
return basePtr_->setPreferableBackend(net, backendId);
basePtr_->setPreferableBackend(net, backendId);
}
void setPreferableTarget(int targetId) override
{
if (preferableTarget != targetId)
......@@ -121,10 +125,14 @@ public:
);
}
//void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>()) override;
//void setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean) override;
// Overrides layer creation: prefer this (backend-local) LayerFactory
// registry, falling back to the base implementation's registry.
Ptr<Layer> createLayerInstance(const LayerData& ld) const override
{
// try to create layer instance from backend-specific pool (e.g., plugin)
Ptr<Layer> instance = LayerFactory::createLayerInstance(ld.type, const_cast<LayerParams&>(ld.params));
if (!instance)
instance = Base::createLayerInstance(ld);
return instance;
}
void addNgraphOutputs(LayerData& ld);
......@@ -132,8 +140,6 @@ public:
void fuseLayers(const std::vector<LayerPin>& blobsToKeep_) override;
//void allocateLayers(const std::vector<LayerPin>& blobsToKeep_) override;
void forwardLayer(LayerData& ld) override;
AsyncArray getBlobAsync(const LayerPin& pin) override;
......@@ -176,7 +182,7 @@ void NetImplOpenVINO::forwardLayer(LayerData& ld)
tm.stop();
int64 t = tm.getTimeTicks();
layersTimings[ld.id] = (t > 0) ? t : t + 1; // zero for skipped layers only
layersTimings[ld.id] = (t > 0) ? t : 1; // zero for skipped layers only
}
else
{
......@@ -681,14 +687,14 @@ void NetImplOpenVINO::fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
void switchToOpenVINOBackend(Net& net)
{
CV_TRACE_FUNCTION();
CV_LOG_INFO(NULL, "DNN: switching to OpenVINO backend...");
Ptr<Net::Impl>& impl_ptr_ref = accessor::DnnNetAccessor::getImplPtrRef(net);
CV_Assert(impl_ptr_ref);
CV_LOG_INFO(NULL, "DNN: switching to OpenVINO backend... (networkID=" << impl_ptr_ref->networkId << ")");
Ptr<NetImplOpenVINO> openvino_impl_ptr = makePtr<NetImplOpenVINO>(impl_ptr_ref);
impl_ptr_ref = openvino_impl_ptr;
}
/*static*/
Net NetImplOpenVINO::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet)
{
......@@ -792,23 +798,70 @@ Net NetImplOpenVINO::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork
return cvNet;
}
#endif // HAVE_INF_ENGINE
Net Net::readFromModelOptimizer(const String& xml, const String& bin)
static
Net openvino_readNetwork(const String& modelPath, const String& binPath)
{
CV_TRACE_FUNCTION();
#ifndef HAVE_INF_ENGINE
CV_UNUSED(xml); CV_UNUSED(bin);
CV_Error(Error::StsError, "Build OpenCV with Inference Engine to enable loading models from Model Optimizer.");
#else
FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;
InferenceEngine::Core& ie = getCore("");
InferenceEngine::CNNNetwork ieNet;
try
{
ieNet = ie.ReadNetwork(modelPath, binPath);
}
catch (const std::exception& e)
{
CV_Error(Error::StsError, std::string("DNN: OpenVINO failed to read model '") + modelPath + "': " + e.what());
}
return NetImplOpenVINO::createNetworkFromModelOptimizer(ieNet);
}
static
Net openvino_readNetwork(
const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize,
const uchar* bufferWeightsPtr, size_t bufferWeightsSize
)
{
FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;
InferenceEngine::Core& ie = getCore("");
InferenceEngine::CNNNetwork ieNet = ie.ReadNetwork(xml, bin);
std::string model; model.assign((char*)bufferModelConfigPtr, bufferModelConfigSize);
InferenceEngine::CNNNetwork ieNet;
try
{
InferenceEngine::TensorDesc tensorDesc(InferenceEngine::Precision::U8, { bufferWeightsSize }, InferenceEngine::Layout::C);
InferenceEngine::Blob::CPtr weights_blob = InferenceEngine::make_shared_blob<uint8_t>(tensorDesc, (uint8_t*)bufferWeightsPtr, bufferWeightsSize);
ieNet = ie.ReadNetwork(model, weights_blob);
}
catch (const std::exception& e)
{
CV_Error(Error::StsError, std::string("DNN: OpenVINO failed to read model: ") + e.what());
}
return NetImplOpenVINO::createNetworkFromModelOptimizer(ieNet);
}
#endif // HAVE_INF_ENGINE
// Loads an OpenVINO IR model (xml + bin) as a cv::dnn::Net.
// Dispatch: built-in Inference Engine if compiled in; otherwise the
// "openvino" plugin (loaderID "" = auto); otherwise a hard error.
Net Net::readFromModelOptimizer(const String& xml, const String& bin)
{
CV_TRACE_FUNCTION();
#if defined(HAVE_INF_ENGINE)
return openvino_readNetwork(xml, bin);
#elif defined(ENABLE_PLUGINS)
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
return networkBackend.readNetwork(std::string(), xml, bin);
#else
CV_UNUSED(xml); CV_UNUSED(bin);
CV_Error(Error::StsError, "Build OpenCV with Inference Engine to enable loading models from Model Optimizer.");
#endif
}
Net Net::readFromModelOptimizer(const std::vector<uchar>& bufferModelConfig, const std::vector<uchar>& bufferWeights)
......@@ -826,34 +879,112 @@ Net Net::readFromModelOptimizer(
)
{
CV_TRACE_FUNCTION();
#ifndef HAVE_INF_ENGINE
#if defined(HAVE_INF_ENGINE)
return openvino_readNetwork(bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize);
#elif defined(ENABLE_PLUGINS)
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
return networkBackend.readNetwork(std::string(), bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize);
#else
CV_UNUSED(bufferModelConfigPtr); CV_UNUSED(bufferWeightsPtr);
CV_UNUSED(bufferModelConfigSize); CV_UNUSED(bufferModelConfigSize);
CV_Error(Error::StsError, "Build OpenCV with Inference Engine to enable loading models from Model Optimizer.");
#else
#endif
}
FPDenormalsIgnoreHintScope fp_denormals_ignore_scope;
InferenceEngine::Core& ie = getCore("");
CV__DNN_INLINE_NS_END
}} // namespace cv::dnn
std::string model; model.assign((char*)bufferModelConfigPtr, bufferModelConfigSize);
InferenceEngine::CNNNetwork ieNet;
try
#ifdef BUILD_PLUGIN
#define ABI_VERSION 0
#define API_VERSION 0
#include "plugin_api.hpp"
namespace cv { namespace dnn_backend {
using namespace cv::dnn;
class NetworkBackendOpenVINO : public NetworkBackend
{
public:
void switchBackend(Net& net) CV_OVERRIDE
{
InferenceEngine::TensorDesc tensorDesc(InferenceEngine::Precision::U8, { bufferWeightsSize }, InferenceEngine::Layout::C);
InferenceEngine::Blob::CPtr weights_blob = InferenceEngine::make_shared_blob<uint8_t>(tensorDesc, (uint8_t*)bufferWeightsPtr, bufferWeightsSize);
cv::dnn::switchToOpenVINOBackend(net);
}
Net readNetwork(const std::string& loaderID, const std::string& model, const std::string& config) CV_OVERRIDE
{
if (!loaderID.empty()) // only auto ("") is supported
{
CV_Error(Error::StsError, "DNN/OpenVINO: unsupported network loader ID: " + loaderID);
}
return openvino_readNetwork(model, config);
}
Net readNetwork(
const std::string& loaderID,
const uchar* bufferModelConfigPtr, size_t bufferModelConfigSize,
const uchar* bufferWeightsPtr, size_t bufferWeightsSize
) CV_OVERRIDE
{
if (!loaderID.empty()) // only auto ("") is supported
{
CV_Error(Error::StsError, "DNN/OpenVINO: unsupported network loader ID: " + loaderID);
}
return openvino_readNetwork(bufferModelConfigPtr, bufferModelConfigSize, bufferWeightsPtr, bufferWeightsSize);
}
bool checkTarget(Target target) CV_OVERRIDE
{
return openvino::checkTarget(target);
}
};
ieNet = ie.ReadNetwork(model, weights_blob);
// Returns the process-wide NetworkBackendOpenVINO singleton
// (lazily created on first call via a function-local static).
static
std::shared_ptr<NetworkBackendOpenVINO>& getInstanceNetworkBackendOpenVINO()
{
static std::shared_ptr<NetworkBackendOpenVINO> g_instance = std::make_shared<NetworkBackendOpenVINO>();
return g_instance;
}
}} // namespace
static
CvResult cv_getInstanceNetworkBackend(CV_OUT CvPluginDNNNetworkBackend* handle) CV_NOEXCEPT
{
try
{
if (!handle)
return CV_ERROR_FAIL;
*handle = cv::dnn_backend::getInstanceNetworkBackendOpenVINO().get();
return CV_ERROR_OK;
}
catch (const std::exception& e)
catch (...)
{
CV_Error(Error::StsError, std::string("DNN: IE failed to load model: ") + e.what());
return CV_ERROR_FAIL;
}
}
return NetImplOpenVINO::createNetworkFromModelOptimizer(ieNet);
#endif // HAVE_INF_ENGINE
static const OpenCV_DNN_Plugin_API plugin_api =
{
{
sizeof(OpenCV_DNN_Plugin_API), ABI_VERSION, API_VERSION,
CV_VERSION_MAJOR, CV_VERSION_MINOR, CV_VERSION_REVISION, CV_VERSION_STATUS,
"OpenVINO OpenCV DNN plugin (" CVAUX_STR(INF_ENGINE_RELEASE) ")"
},
{
/* 1*/cv_getInstanceNetworkBackend
}
};
// Plugin entry point resolved by the OpenCV DNN plugin loader.
// ABI must match exactly; API is backward compatible (caller may request
// an older API version than the plugin provides).
const OpenCV_DNN_Plugin_API* CV_API_CALL opencv_dnn_plugin_init_v0(int requested_abi_version, int requested_api_version, void* /*reserved=NULL*/) CV_NOEXCEPT
{
if (requested_abi_version == ABI_VERSION && requested_api_version <= API_VERSION)
return &plugin_api;
return NULL;
}
CV__DNN_INLINE_NS_END
}} // namespace cv::dnn
#endif // BUILD_PLUGIN
......@@ -11,7 +11,11 @@
#ifdef HAVE_INF_ENGINE
#include <ie_extension.h>
#endif // HAVE_INF_ENGINE
#elif defined(ENABLE_PLUGINS)
// using plugin API
#include "backend.hpp"
#include "factory.hpp"
#endif
#include <opencv2/core/utils/configuration.private.hpp>
#include <opencv2/core/utils/logger.hpp>
......@@ -155,7 +159,6 @@ static bool detectMyriadX_(const std::string& device)
}
#endif // !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
#endif // HAVE_INF_ENGINE
......@@ -281,24 +284,100 @@ bool checkTarget(Target target)
#else // HAVE_INF_ENGINE
namespace openvino {
// Fallback (no built-in Inference Engine): probe the "openvino" plugin
// for target availability; any plugin failure degrades to "not available".
bool checkTarget(Target target)
{
#if defined(ENABLE_PLUGINS)
try
{
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
return networkBackend.checkTarget(target);
}
catch (const std::exception& e)
{
CV_LOG_INFO(NULL, "DNN/OpenVINO: checkTarget failed: " << e.what())
}
#endif
return false;
}
} // namespace openvino
// Fallback (no built-in Inference Engine): report the nGraph backend type
// if the "openvino" plugin loads; otherwise throw StsNotImplemented.
cv::String getInferenceEngineBackendType()
{
#if defined(ENABLE_PLUGINS)
try
{
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
CV_UNUSED(networkBackend);  // loading the plugin is the availability check
return CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
}
catch (const std::exception& e)
{
CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
}
#endif
CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
// Fallback (no built-in Inference Engine): only the nGraph type is accepted.
// NOTE(review): even when the plugin loads and the assert passes, control
// falls through to CV_Error below - presumably changing the type is
// unsupported in plugin mode; confirm against upstream intent.
cv::String setInferenceEngineBackendType(const cv::String& newBackendType)
{
#if defined(ENABLE_PLUGINS)
try
{
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
CV_UNUSED(networkBackend);
CV_Assert(newBackendType == CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
}
catch (const std::exception& e)
{
CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
}
#endif
CV_UNUSED(newBackendType);
CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
// Fallback (no built-in Inference Engine): ask the plugin whether MYRIAD is
// usable. NOTE(review): the CV_Error inside the try block throws a
// cv::Exception, which is caught below and logged as "plugin is not
// available" before the final StsNotImplemented - the log message is
// misleading in that path.
cv::String getInferenceEngineVPUType()
{
#if defined(ENABLE_PLUGINS)
try
{
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
if (networkBackend.checkTarget(DNN_TARGET_MYRIAD))
return CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X; // 2021.4 supports NCS2 only
CV_Error(Error::StsError, "DNN/OpenVINO: DNN_TARGET_MYRIAD is not available");
}
catch (const std::exception& e)
{
CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
}
#endif
CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
// Fallback (no built-in Inference Engine): if the plugin loads, derive the
// CPU plugin type from the compile-time host architecture (ARM vs x86).
cv::String getInferenceEngineCPUType()
{
#if defined(ENABLE_PLUGINS)
try
{
auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
CV_UNUSED(networkBackend);  // loading the plugin is the availability check
#if defined(__arm__) || defined(__aarch64__) || defined(_M_ARM64)
return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE;
#else
return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_X86;
#endif
}
catch (const std::exception& e)
{
CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
}
#endif
CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
#endif // HAVE_INF_ENGINE
......
......@@ -60,6 +60,15 @@
namespace cv { namespace dnn {
CV__DNN_INLINE_NS_BEGIN
namespace openvino {
// TODO: use std::string as parameter
bool checkTarget(Target target);
} // namespace openvino
CV__DNN_INLINE_NS_END
#ifdef HAVE_INF_ENGINE
Backend& getInferenceEngineBackendTypeParam();
......@@ -75,13 +84,6 @@ CV__DNN_INLINE_NS_BEGIN
void switchToOpenVINOBackend(Net& net);
namespace openvino {
// TODO: use std::string as parameter
bool checkTarget(Target target);
} // namespace openvino
bool isMyriadX();
bool isArmComputePlugin();
......
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef DNN_PLUGIN_API_HPP
#define DNN_PLUGIN_API_HPP

#include <opencv2/core/cvdef.h>
#include <opencv2/core/llapi/llapi.h>

#include "backend.hpp"

// Version negotiation: the OpenCV side (loader) defines the current API/ABI
// levels here; a plugin build must pin its own levels before including this
// header so that a newer copy of the header cannot silently change them.
#if !defined(BUILD_PLUGIN)

/// increased for backward-compatible changes, e.g. add new function
/// Caller API <= Plugin API -> plugin is fully compatible
/// Caller API > Plugin API -> plugin is not fully compatible, caller should use extra checks to use plugins with older API
#define API_VERSION 0 // preview

/// increased for incompatible changes, e.g. remove function argument
/// Caller ABI == Plugin ABI -> plugin is compatible
/// Caller ABI > Plugin ABI -> plugin is not compatible, caller should use shim code to use old ABI plugins (caller may know how lower ABI works, so it is possible)
/// Caller ABI < Plugin ABI -> plugin can't be used (plugin should provide interface with lower ABI to handle that)
#define ABI_VERSION 0 // preview

#else // !defined(BUILD_PLUGIN)

#if !defined(ABI_VERSION) || !defined(API_VERSION)
#error "Plugin must define ABI_VERSION and API_VERSION before including plugin_api.hpp"
#endif

#endif // !defined(BUILD_PLUGIN)

// Handle passed across the C plugin boundary; points at the plugin-owned
// NetworkBackend instance (the plugin retains ownership).
typedef cv::dnn_backend::NetworkBackend* CvPluginDNNNetworkBackend;

// Function table for ABI 0 / API 0.
struct OpenCV_DNN_Plugin_API_v0_0_api_entries
{
    /** @brief Get backend API instance

    @param[out] handle pointer on inference backend API handle

    @note API-CALL 1, API-Version == 0
     */
    CvResult (CV_API_CALL *getInstance)(CV_OUT CvPluginDNNNetworkBackend* handle) CV_NOEXCEPT;
}; // OpenCV_DNN_Plugin_API_v0_0_api_entries

// Top-level structure returned by the plugin's init entry point:
// a common header (OpenCV version + ABI/API levels) followed by the
// version-0 function table.
typedef struct OpenCV_DNN_Plugin_API_v0
{
    OpenCV_API_Header api_header;
    struct OpenCV_DNN_Plugin_API_v0_0_api_entries v0;
} OpenCV_DNN_Plugin_API_v0;

#if ABI_VERSION == 0 && API_VERSION == 0
typedef OpenCV_DNN_Plugin_API_v0 OpenCV_DNN_Plugin_API;
#else
#error "Not supported configuration: check ABI_VERSION/API_VERSION"
#endif

#ifdef BUILD_PLUGIN
// Plugin side: exported entry point resolved by the loader via getSymbol().
// Returns the best-matching API table for the requested versions, or NULL.
extern "C" {

CV_PLUGIN_EXPORTS
const OpenCV_DNN_Plugin_API* CV_API_CALL opencv_dnn_plugin_init_v0
        (int requested_abi_version, int requested_api_version, void* reserved /*NULL*/) CV_NOEXCEPT;

} // extern "C"
#else // BUILD_PLUGIN
// Loader side: signature used to call the entry point resolved from the
// plugin shared library.
typedef const OpenCV_DNN_Plugin_API* (CV_API_CALL *FN_opencv_dnn_plugin_init_t)
        (int requested_abi_version, int requested_api_version, void* reserved /*NULL*/);
#endif // BUILD_PLUGIN

#endif // DNN_PLUGIN_API_HPP
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Not a standalone header, part of backend.cpp
//
//==================================================================================================
// Dynamic backend implementation
#include "opencv2/core/utils/plugin_loader.private.hpp"
namespace cv { namespace impl {
using namespace cv::dnn_backend;
#if OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)
using namespace cv::plugin::impl; // plugin_loader.hpp
// Wraps a dynamically loaded DNN plugin library: resolves its C entry point,
// negotiates an API level, validates version compatibility and vends the
// plugin's NetworkBackend instance.
// FIX: corrected log-message typo "mismath" -> "mismatch"; no other code change.
class PluginDNNBackend CV_FINAL: public std::enable_shared_from_this<PluginDNNBackend>
{
protected:
    // Resolve the plugin entry point and negotiate an API level.
    // On success plugin_api_ points at the plugin's function table; on any
    // incompatibility it stays/becomes NULL (the library itself remains loaded).
    void initPluginAPI()
    {
        const char* init_name = "opencv_dnn_plugin_init_v0";
        FN_opencv_dnn_plugin_init_t fn_init = reinterpret_cast<FN_opencv_dnn_plugin_init_t>(lib_->getSymbol(init_name));
        if (fn_init)
        {
            CV_LOG_DEBUG(NULL, "Found entry: '" << init_name << "'");
            // Negotiate downwards: try the newest API level first, then older ones.
            for (int supported_api_version = API_VERSION; supported_api_version >= 0; supported_api_version--)
            {
                plugin_api_ = fn_init(ABI_VERSION, supported_api_version, NULL);
                if (plugin_api_)
                    break;
            }
            if (!plugin_api_)
            {
                CV_LOG_INFO(NULL, "DNN: plugin is incompatible (can't be initialized): " << lib_->getName());
                return;
            }
            // NB: force strict minor version check (ABI is not preserved for now)
            if (!checkCompatibility(plugin_api_->api_header, ABI_VERSION, API_VERSION, true))
            {
                plugin_api_ = NULL;
                return;
            }
            CV_LOG_INFO(NULL, "DNN: plugin is ready to use '" << plugin_api_->api_header.api_description << "'");
        }
        else
        {
            CV_LOG_INFO(NULL, "DNN: plugin is incompatible, missing init function: '" << init_name << "', file: " << lib_->getName());
        }
    }

    // Validate the plugin header against this build:
    // - OpenCV major version must match (minor too when checkMinorOpenCVVersion);
    // - ABI level (carried in min_api_version) must match exactly;
    // - API level mismatch is tolerated but logged.
    // Returns true when the plugin is usable.
    bool checkCompatibility(const OpenCV_API_Header& api_header, unsigned int abi_version, unsigned int api_version, bool checkMinorOpenCVVersion)
    {
        if (api_header.opencv_version_major != CV_VERSION_MAJOR)
        {
            CV_LOG_ERROR(NULL, "DNN: wrong OpenCV major version used by plugin '" << api_header.api_description << "': " <<
                cv::format("%d.%d, OpenCV version is '" CV_VERSION "'", api_header.opencv_version_major, api_header.opencv_version_minor))
            return false;
        }
        if (!checkMinorOpenCVVersion)
        {
            // no checks for OpenCV minor version
        }
        else if (api_header.opencv_version_minor != CV_VERSION_MINOR)
        {
            CV_LOG_ERROR(NULL, "DNN: wrong OpenCV minor version used by plugin '" << api_header.api_description << "': " <<
                cv::format("%d.%d, OpenCV version is '" CV_VERSION "'", api_header.opencv_version_major, api_header.opencv_version_minor))
            return false;
        }
        CV_LOG_DEBUG(NULL, "DNN: initialized '" << api_header.api_description << "': built with "
            << cv::format("OpenCV %d.%d (ABI/API = %d/%d)",
                          api_header.opencv_version_major, api_header.opencv_version_minor,
                          api_header.min_api_version, api_header.api_version)
            << ", current OpenCV version is '" CV_VERSION "' (ABI/API = " << abi_version << "/" << api_version << ")"
        );
        if (api_header.min_api_version != abi_version) // future: range can be here
        {
            // actually this should never happen due to checks in plugin's init() function
            CV_LOG_ERROR(NULL, "DNN: plugin is not supported due to incompatible ABI = " << api_header.min_api_version);
            return false;
        }
        if (api_header.api_version != api_version)
        {
            // FIX: was "mismath" in the log message
            CV_LOG_INFO(NULL, "DNN: NOTE: plugin is supported, but there is API version mismatch: "
                << cv::format("plugin API level (%d) != OpenCV API level (%d)", api_header.api_version, api_version));
            if (api_header.api_version < api_version)
            {
                CV_LOG_INFO(NULL, "DNN: NOTE: some functionality may be unavailable due to lack of support by plugin implementation");
            }
        }
        return true;
    }

public:
    std::shared_ptr<cv::plugin::impl::DynamicLib> lib_;  // keeps the shared library alive
    const OpenCV_DNN_Plugin_API* plugin_api_;            // plugin function table; NULL when incompatible

    // Loads nothing itself: binds to an already-opened library and runs
    // initPluginAPI(). Check plugin_api_ afterwards to see whether it worked.
    PluginDNNBackend(const std::shared_ptr<cv::plugin::impl::DynamicLib>& lib)
        : lib_(lib)
        , plugin_api_(NULL)
    {
        initPluginAPI();
    }

    // Ask the plugin for its NetworkBackend instance. The plugin owns the
    // object, hence the no-op deleter. Returns an empty pointer on failure.
    std::shared_ptr<cv::dnn_backend::NetworkBackend> createNetworkBackend() const
    {
        CV_Assert(plugin_api_);
        CvPluginDNNNetworkBackend instancePtr = NULL;
        if (plugin_api_->v0.getInstance)
        {
            if (CV_ERROR_OK == plugin_api_->v0.getInstance(&instancePtr))
            {
                CV_Assert(instancePtr);
                // TODO C++20 "aliasing constructor"
                return std::shared_ptr<cv::dnn_backend::NetworkBackend>(instancePtr, [](cv::dnn_backend::NetworkBackend*){}); // empty deleter
            }
        }
        return std::shared_ptr<cv::dnn_backend::NetworkBackend>();
    }
}; // class PluginDNNBackend
// Factory that locates and loads the plugin lazily, on the first
// createNetworkBackend() call; the load is attempted at most once.
class PluginDNNBackendFactory CV_FINAL: public IDNNBackendFactory
{
public:
    std::string baseName_;                      // plugin base name, e.g. "openvino"
    std::shared_ptr<PluginDNNBackend> backend;  // set by loadPlugin() on success; may stay empty
    bool initialized;                           // a load attempt was made (success or failure)
public:
    PluginDNNBackendFactory(const std::string& baseName)
        : baseName_(baseName)
        , initialized(false)
    {
        // nothing, plugins are loaded on demand
    }

    // Returns the plugin's NetworkBackend, or an empty pointer when the plugin
    // is missing/incompatible. Triggers the one-time plugin load on first use.
    std::shared_ptr<cv::dnn_backend::NetworkBackend> createNetworkBackend() const CV_OVERRIDE
    {
        if (!initialized)
        {
            // const_cast: lazy initialization mutates only the cached state.
            // NOTE(review): 'initialized' is read here without holding the
            // mutex (it is re-checked under lock in initBackend()) —
            // double-checked-locking-style fast path; confirm no stricter
            // memory-ordering guarantee is required.
            const_cast<PluginDNNBackendFactory*>(this)->initBackend();
        }
        if (backend)
            return backend->createNetworkBackend();
        return std::shared_ptr<cv::dnn_backend::NetworkBackend>();
    }

protected:
    // One-shot load guarded by the global initialization mutex; any exception
    // from loadPlugin() is logged and swallowed (best-effort).
    void initBackend()
    {
        AutoLock lock(getInitializationMutex());
        try
        {
            if (!initialized)
                loadPlugin();
        }
        catch (...)
        {
            CV_LOG_INFO(NULL, "DNN: exception during plugin loading: " << baseName_ << ". SKIP");
        }
        initialized = true;  // set even on failure: do not retry
    }
    // Defined below: scans candidate files and binds the first compatible one.
    void loadPlugin();
};
// Build an ordered list of candidate plugin library paths for 'baseName'
// (e.g. "openvino"). Search locations come from the OPENCV_DNN_PLUGIN_PATH
// environment parameter when set, otherwise the directory of the current
// binary (plus an optional configured subdirectory). The file-name pattern
// defaults to "<prefix>opencv_dnn_<name>*<suffix>" and can be overridden via
// OPENCV_DNN_PLUGIN_<NAME>.
static
std::vector<FileSystemPath_t> getPluginCandidates(const std::string& baseName)
{
    using namespace cv::utils;
    using namespace cv::utils::fs;
    const std::string baseName_l = toLowerCase(baseName);
    const std::string baseName_u = toUpperCase(baseName);
    const FileSystemPath_t baseName_l_fs = toFileSystemPath(baseName_l);
    std::vector<FileSystemPath_t> paths;
    // TODO OPENCV_PLUGIN_PATH
    const std::vector<std::string> paths_ = getConfigurationParameterPaths("OPENCV_DNN_PLUGIN_PATH", std::vector<std::string>());
    if (paths_.size() != 0)
    {
        // Explicit search paths take precedence over the binary location.
        for (size_t i = 0; i < paths_.size(); i++)
        {
            paths.push_back(toFileSystemPath(paths_[i]));
        }
    }
    else
    {
        FileSystemPath_t binaryLocation;
        if (getBinLocation(binaryLocation))
        {
            binaryLocation = getParent(binaryLocation);
#ifndef CV_DNN_PLUGIN_SUBDIRECTORY
            paths.push_back(binaryLocation);
#else
            paths.push_back(binaryLocation + toFileSystemPath("/") + toFileSystemPath(CV_DNN_PLUGIN_SUBDIRECTORY_STR));
#endif
        }
    }
    // Default glob, e.g. "libopencv_dnn_openvino*.so" / "opencv_dnn_openvino*.dll".
    const std::string default_expr = libraryPrefix() + "opencv_dnn_" + baseName_l + "*" + librarySuffix();
    const std::string plugin_expr = getConfigurationParameterString((std::string("OPENCV_DNN_PLUGIN_") + baseName_u).c_str(), default_expr.c_str());
    std::vector<FileSystemPath_t> results;
#ifdef _WIN32
    // Windows: no globbing; probe the fixed module name in each path, with a
    // user-overridden name tried first and a bare name (OS loader search) last.
    FileSystemPath_t moduleName = toFileSystemPath(libraryPrefix() + "opencv_dnn_" + baseName_l + librarySuffix());
    if (plugin_expr != default_expr)
    {
        moduleName = toFileSystemPath(plugin_expr);
        results.push_back(moduleName);
    }
    for (const FileSystemPath_t& path : paths)
    {
        results.push_back(path + L"\\" + moduleName);
    }
    results.push_back(moduleName);
#else
    CV_LOG_DEBUG(NULL, "DNN: " << baseName << " plugin's glob is '" << plugin_expr << "', " << paths.size() << " location(s)");
    for (const std::string& path : paths)
    {
        if (path.empty())
            continue;
        std::vector<std::string> candidates;
        cv::glob(utils::fs::join(path, plugin_expr), candidates);
        // Prefer candidates with higher versions
        // TODO: implemented accurate versions-based comparator
        std::sort(candidates.begin(), candidates.end(), std::greater<std::string>());
        CV_LOG_DEBUG(NULL, " - " << path << ": " << candidates.size());
        copy(candidates.begin(), candidates.end(), back_inserter(results));
    }
#endif
    CV_LOG_DEBUG(NULL, "Found " << results.size() << " plugin(s) for " << baseName);
    return results;
}
// Try each candidate library in order and keep the first one that both loads
// and exposes a compatible plugin API. On success the library is pinned in
// memory before the backend is published (unloading a live backend would be
// unsafe). Leaves 'backend' empty when no candidate works.
void PluginDNNBackendFactory::loadPlugin()
{
    for (const FileSystemPath_t& plugin : getPluginCandidates(baseName_))
    {
        auto lib = std::make_shared<cv::plugin::impl::DynamicLib>(plugin);
        if (!lib->isLoaded())  // not a loadable shared library; try next candidate
        {
            continue;
        }
        try
        {
            auto pluginBackend = std::make_shared<PluginDNNBackend>(lib);
            if (!pluginBackend)
            {
                // NOTE(review): make_shared never yields a null pointer
                // (it throws on allocation failure), so this branch is dead code.
                continue;
            }
            if (pluginBackend->plugin_api_ == NULL)  // loaded, but API negotiation failed
            {
                CV_LOG_ERROR(NULL, "DNN: no compatible plugin API for backend: " << baseName_ << " in " << toPrintablePath(plugin));
                continue;
            }
            // NB: we are going to use backend, so prevent automatic library unloading
            lib->disableAutomaticLibraryUnloading();
            backend = pluginBackend;
            return;
        }
        catch (...)
        {
            CV_LOG_WARNING(NULL, "DNN: exception during plugin initialization: " << toPrintablePath(plugin) << ". SKIP");
        }
    }
}
#endif // OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)
} // namespace
namespace dnn_backend {
// Return the (process-wide, cached) plugin factory for 'baseName'.
// Factories are cached by upper-cased name under the global init mutex, so
// repeated calls share one lazily-loading factory instance. Returns an empty
// pointer when plugin support is compiled out.
std::shared_ptr<IDNNBackendFactory> createPluginDNNBackendFactory(const std::string& baseName)
{
#if OPENCV_HAVE_FILESYSTEM_SUPPORT && defined(ENABLE_PLUGINS)
    const std::string cacheKey = toUpperCase(baseName);
    AutoLock lock(getInitializationMutex());
    static std::map<std::string, std::shared_ptr<IDNNBackendFactory>> g_plugins_cache;
    auto cached = g_plugins_cache.find(cacheKey);
    if (cached != g_plugins_cache.end())
        return cached->second;
    auto factory = std::make_shared<impl::PluginDNNBackendFactory>(baseName);
    g_plugins_cache[cacheKey] = factory;
    return factory;
#else
    CV_UNUSED(baseName);
    return std::shared_ptr<IDNNBackendFactory>();
#endif
}
// Resolve the plugin-provided NetworkBackend for 'baseName' or raise
// StsNotImplemented. The returned reference points at a plugin-owned
// singleton (see PluginDNNBackend::createNetworkBackend's no-op deleter),
// so it stays valid after the local shared_ptr goes out of scope.
cv::dnn_backend::NetworkBackend& createPluginDNNNetworkBackend(const std::string& baseName)
{
    auto factory = dnn_backend::createPluginDNNBackendFactory(baseName);
    if (!factory)
        CV_Error(Error::StsNotImplemented, cv::format("Plugin factory is not available: '%s'", baseName.c_str()));
    auto networkBackend = factory->createNetworkBackend();
    if (!networkBackend)
        CV_Error(Error::StsNotImplemented, cv::format("Backend (plugin) is not available: '%s'", baseName.c_str()));
    return *networkBackend;
}
}} // namespace
......@@ -39,8 +39,14 @@
//
//M*/
#include <opencv2/core.hpp>
#if !defined(BUILD_PLUGIN)
#include "cvconfig.h"
#else
#include <opencv2/core/cvdef.h>
#undef __OPENCV_BUILD // allow public API only
#endif
#include <opencv2/core.hpp>
#ifndef CV_OCL4DNN
#define CV_OCL4DNN 0
......
......@@ -14,6 +14,8 @@
#include "halide_scheduler.hpp"
#include "backend.hpp"
#include "factory.hpp"
namespace cv {
namespace dnn {
......@@ -43,43 +45,46 @@ private:
#endif
#endif // HAVE_HALIDE
bool haveBackendOpenVINO = false;
#ifdef HAVE_INF_ENGINE
if (openvino::checkTarget(DNN_TARGET_CPU))
haveBackendOpenVINO = true;
#elif defined(ENABLE_PLUGINS)
{
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_CPU));
auto factory = dnn_backend::createPluginDNNBackendFactory("openvino");
if (factory)
{
auto backend = factory->createNetworkBackend();
if (backend)
haveBackendOpenVINO = true;
}
}
#endif
if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_CPU))
{
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_CPU));
}
if (openvino::checkTarget(DNN_TARGET_MYRIAD))
if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_MYRIAD))
{
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_MYRIAD));
#endif
}
if (openvino::checkTarget(DNN_TARGET_HDDL))
if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_HDDL))
{
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_HDDL));
#endif
}
#ifdef HAVE_OPENCL
if (cv::ocl::useOpenCL() && ocl::Device::getDefault().isIntel())
{
if (openvino::checkTarget(DNN_TARGET_OPENCL))
if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_OPENCL))
{
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_OPENCL));
#endif
}
if (openvino::checkTarget(DNN_TARGET_OPENCL_FP16))
if (haveBackendOpenVINO && openvino::checkTarget(DNN_TARGET_OPENCL_FP16))
{
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_OPENCL_FP16));
#endif
}
}
#endif
#endif // HAVE_INF_ENGINE
#endif // HAVE_OPENCL
#ifdef HAVE_WEBNN
if (haveWebnn())
......@@ -132,10 +137,9 @@ std::vector<Target> getAvailableTargets(Backend be)
{
if (be == DNN_BACKEND_DEFAULT)
be = (Backend)getParam_DNN_BACKEND_DEFAULT();
#ifdef HAVE_INF_ENGINE
if (be == DNN_BACKEND_INFERENCE_ENGINE)
be = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
#endif
std::vector<Target> result;
const BackendRegistry::BackendsList all_backends = getAvailableBackends();
......
......@@ -130,9 +130,7 @@ void normAssertTextDetections(
void readFileContent(const std::string& filename, CV_OUT std::vector<char>& content);
#ifdef HAVE_INF_ENGINE
bool validateVPUType();
#endif
testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTargets(
bool withInferenceEngine = true,
......
......@@ -254,9 +254,7 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
bool withWebnn /*= false*/
)
{
#ifdef HAVE_INF_ENGINE
bool withVPU = validateVPUType();
#endif
std::vector< tuple<Backend, Target> > targets;
std::vector< Target > available;
......@@ -266,7 +264,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
for (std::vector< Target >::const_iterator i = available.begin(); i != available.end(); ++i)
targets.push_back(make_tuple(DNN_BACKEND_HALIDE, *i));
}
#ifdef HAVE_INF_ENGINE
if (withInferenceEngine)
{
available = getAvailableTargets(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019);
......@@ -288,9 +285,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
}
}
#else
CV_UNUSED(withInferenceEngine);
#endif
if (withVkCom)
{
available = getAvailableTargets(DNN_BACKEND_VKCOM);
......@@ -356,7 +350,6 @@ testing::internal::ParamGenerator< tuple<Backend, Target> > dnnBackendsAndTarget
#endif
}
#ifdef HAVE_INF_ENGINE
static std::string getTestInferenceEngineVPUType()
{
static std::string param_vpu_type = utils::getConfigurationParameterString("OPENCV_TEST_DNN_IE_VPU_TYPE", "");
......@@ -419,7 +412,6 @@ bool validateVPUType()
static bool result = validateVPUType_();
return result;
}
#endif // HAVE_INF_ENGINE
void initDNNTests()
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册