diff --git a/modules/dnn/src/op_inf_engine.cpp b/modules/dnn/src/op_inf_engine.cpp
index f354d6966e6104536510ac79f63e510402ad3244..43d8d1eb2d3d7d13974292a363a2348c6f33ad23 100644
--- a/modules/dnn/src/op_inf_engine.cpp
+++ b/modules/dnn/src/op_inf_engine.cpp
@@ -161,6 +161,7 @@ InfEngineBackendNet::InfEngineBackendNet(InferenceEngine::CNNNetwork& net)
     inputs = net.getInputsInfo();
     outputs = net.getOutputsInfo();
     layers.resize(net.layerCount());  // A hack to execute InfEngineBackendNet::layerCount correctly.
+    netOwner = net;
 }
 
 void InfEngineBackendNet::Release() noexcept
diff --git a/modules/dnn/src/op_inf_engine.hpp b/modules/dnn/src/op_inf_engine.hpp
index 841cb13e13503503f66b59c2547ec8aa96f81386..f49a8e0445feb66be4d284fe49291660f5dbcde8 100644
--- a/modules/dnn/src/op_inf_engine.hpp
+++ b/modules/dnn/src/op_inf_engine.hpp
@@ -131,6 +131,8 @@ private:
     InferenceEngine::InferencePlugin plugin;
     InferenceEngine::ExecutableNetwork netExec;
     InferenceEngine::InferRequest infRequest;
+    // In case of models from Model Optimizer we need to manage their lifetime.
+    InferenceEngine::CNNNetwork netOwner;
 
     std::string name;
 
diff --git a/modules/dnn/test/test_ie_models.cpp b/modules/dnn/test/test_ie_models.cpp
index 9fefe4fd045089fc0d535cc734076d630763f6e8..22dc1fe65f8ab097d8b6b3c2946bbd83feb11be7 100644
--- a/modules/dnn/test/test_ie_models.cpp
+++ b/modules/dnn/test/test_ie_models.cpp
@@ -177,10 +177,6 @@ TEST_P(DNNTestOpenVINO, models)
     Target target = (dnn::Target)(int)get<0>(GetParam());
     std::string modelName = get<1>(GetParam());
 
-    if ((modelName == "semantic-segmentation-adas-0001" && target == DNN_TARGET_OPENCL_FP16) ||
-        (modelName == "vehicle-license-plate-detection-barrier-0106"))
-        throw SkipTestException("");
-
     std::string precision = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? "FP16" : "FP32";
     std::string prefix = utils::fs::join("intel_models", utils::fs::join(modelName,
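
For context, here is a minimal, self-contained sketch of the lifetime issue the new `netOwner` member addresses ("In case of models from Model Optimizer we need to manage their lifetime"). The `Fake*` types below are made up for illustration and are not the real InferenceEngine API; the point is only that the backend wrapper keeps views into data owned by the network handle, so storing a copy of that handle keeps the data alive after the caller's network object goes out of scope.

```cpp
#include <map>
#include <memory>
#include <string>

struct FakeNetworkData { std::map<std::string, int> inputsInfo; };

// Stand-in for InferenceEngine::CNNNetwork: a cheap-to-copy handle over shared data.
struct FakeCNNNetwork
{
    std::shared_ptr<FakeNetworkData> data = std::make_shared<FakeNetworkData>();
    std::map<std::string, int>& getInputsInfo() { return data->inputsInfo; }
};

// Stand-in for InfEngineBackendNet.
class FakeBackendNet
{
public:
    explicit FakeBackendNet(FakeCNNNetwork& net)
        : inputs(&net.getInputsInfo()),  // borrowed view into the network's data
          netOwner(net)                  // the added member: the copy extends the data's lifetime
    {}

    std::map<std::string, int>* inputs;
    FakeCNNNetwork netOwner;
};

int main()
{
    FakeBackendNet* backend;
    {
        FakeCNNNetwork net;              // e.g. a network loaded from Model Optimizer IR
        backend = new FakeBackendNet(net);
    }                                    // 'net' goes out of scope here
    // Without netOwner, backend->inputs would dangle; with it, the shared data is still valid.
    backend->inputs->emplace("input0", 1);
    delete backend;
    return 0;
}
```

This also explains why the two previously skipped Model Optimizer test cases can be re-enabled in test_ie_models.cpp: once the wrapper owns a copy of the network, the test no longer depends on the temporary network outliving the backend.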