Commit 9076bb60, authored by Alexander Alekhin

Merge pull request #12081 from mshabunin:fix-ie-build

@@ -322,12 +322,30 @@ InferenceEngine::StatusCode InfEngineBackendNet::setBatchSize(const size_t) noex
     return InferenceEngine::StatusCode::OK;
 }
 
+InferenceEngine::StatusCode InfEngineBackendNet::setBatchSize(size_t size, InferenceEngine::ResponseDesc *responseDesc) noexcept
+{
+    CV_Error(Error::StsNotImplemented, "");
+    return InferenceEngine::StatusCode::OK;
+}
+
 size_t InfEngineBackendNet::getBatchSize() const noexcept
 {
     CV_Error(Error::StsNotImplemented, "");
     return 0;
 }
 
+InferenceEngine::StatusCode InfEngineBackendNet::AddExtension(const InferenceEngine::IShapeInferExtensionPtr &extension, InferenceEngine::ResponseDesc *resp) noexcept
+{
+    CV_Error(Error::StsNotImplemented, "");
+    return InferenceEngine::StatusCode::OK;
+}
+
+InferenceEngine::StatusCode InfEngineBackendNet::reshape(const InferenceEngine::ICNNNetwork::InputShapes &inputShapes, InferenceEngine::ResponseDesc *resp) noexcept
+{
+    CV_Error(Error::StsNotImplemented, "");
+    return InferenceEngine::StatusCode::OK;
+}
+
 void InfEngineBackendNet::init(int targetId)
 {
     if (inputs.empty())
@@ -9,6 +9,8 @@
 #define __OPENCV_DNN_OP_INF_ENGINE_HPP__
 
 #include "opencv2/core/cvdef.h"
+#include "opencv2/core/cvstd.hpp"
+#include "opencv2/dnn.hpp"
 
 #ifdef HAVE_INF_ENGINE
 #if defined(__GNUC__) && __GNUC__ >= 5
@@ -86,8 +88,14 @@ public:
     virtual InferenceEngine::StatusCode setBatchSize(const size_t size) noexcept CV_OVERRIDE;
+    virtual InferenceEngine::StatusCode setBatchSize(size_t size, InferenceEngine::ResponseDesc* responseDesc) noexcept;
 
     virtual size_t getBatchSize() const noexcept CV_OVERRIDE;
 
+    virtual InferenceEngine::StatusCode AddExtension(const InferenceEngine::IShapeInferExtensionPtr& extension, InferenceEngine::ResponseDesc* resp) noexcept;
+    virtual InferenceEngine::StatusCode reshape(const InputShapes& inputShapes, InferenceEngine::ResponseDesc* resp) noexcept;
+
     void init(int targetId);
 
     void addBlobs(const std::vector<Ptr<BackendWrapper> >& wrappers);
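Both hunks follow the same pattern: the header declares extra virtual methods on InfEngineBackendNet (presumably to track methods that newer Inference Engine releases expect on InferenceEngine::ICNNNetwork implementations), and the source file gives them stub bodies that raise CV_Error(Error::StsNotImplemented, "") so the class stays concrete and the build keeps working. Below is a minimal, self-contained sketch of that stub-override pattern; all names in it (INetworkLike, StatusCode, BackendNetStub, reshape) are illustrative stand-ins, not the real Inference Engine or OpenCV API, and it returns a status instead of throwing where the real patch calls CV_Error.

// Sketch only: an adapter implements the interface methods it actually
// uses and stubs the rest, so an interface that grows new pure-virtual
// methods between releases does not break the build.
#include <cstddef>

enum class StatusCode { OK, NOT_IMPLEMENTED };

// Stand-in for an evolving third-party interface.
struct INetworkLike
{
    virtual ~INetworkLike() {}
    virtual StatusCode setBatchSize(std::size_t size) noexcept = 0;
    virtual StatusCode reshape() noexcept = 0;   // hypothetical newer addition
};

// Backend adapter: real work only where it is needed, stubs elsewhere.
struct BackendNetStub : public INetworkLike
{
    StatusCode setBatchSize(std::size_t) noexcept override
    {
        return StatusCode::OK;                   // real handling would go here
    }
    StatusCode reshape() noexcept override
    {
        // The real patch calls CV_Error(Error::StsNotImplemented, "");
        // this sketch just reports the same intent as a status code.
        return StatusCode::NOT_IMPLEMENTED;
    }
};

int main()
{
    BackendNetStub net;
    return net.reshape() == StatusCode::NOT_IMPLEMENTED ? 0 : 1;
}

The stub bodies suggest these entry points are not expected to be reached through OpenCV's own DNN code paths; they exist so the wrapper class still satisfies the interface after the header update.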