Commit 2bba0f29 authored by Alexander Alekhin

Merge pull request #13493 from dkurt:dnn_ie_r5

@@ -116,9 +116,15 @@ public:
     virtual bool supportBackend(int backendId) CV_OVERRIDE
     {
+#ifdef HAVE_INF_ENGINE
         if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5)
+            return !zeroDev && eps <= 1e-7f;
+#else
             return !zeroDev && (preferableTarget == DNN_TARGET_CPU || eps <= 1e-7f);
+#endif
         else
+#endif  // HAVE_INF_ENGINE
             return backendId == DNN_BACKEND_OPENCV;
     }
......
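Note (not part of the diff): `INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5)` compares release identifiers that encode the OpenVINO release as digits, with 2018R5 corresponding to `2018050000` (the same constant the test guards below compare against). A minimal sketch of how such macros can be defined is shown here; the authoritative definitions live in OpenCV's `op_inf_engine.hpp` and may differ in detail.

```cpp
// Sketch only: release IDs encode the OpenVINO release, e.g. 2018R5 -> 2018050000.
#define INF_ENGINE_RELEASE_2018R3 2018030000
#define INF_ENGINE_RELEASE_2018R5 2018050000

// "Major" comparison drops the trailing patch digits before comparing (assumed form).
#define INF_ENGINE_VER_MAJOR_GE(ver) (((INF_ENGINE_RELEASE) / 10000) >= ((ver) / 10000))
```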
@@ -420,31 +420,30 @@ void ONNXImporter::populateNet(Net dstNet)
         }
         else if (layer_type == "Sub")
         {
-            Mat blob = (-1.0f) * getBlob(node_proto, constBlobs, 1);
-            blob = blob.reshape(1, 1);
+            Mat blob = getBlob(node_proto, constBlobs, 1);
             if (blob.total() == 1) {
                 layerParams.type = "Power";
-                layerParams.set("shift", blob.at<float>(0));
+                layerParams.set("shift", -blob.at<float>(0));
             }
             else {
                 layerParams.type = "Scale";
                 layerParams.set("has_bias", true);
-                layerParams.blobs.push_back(blob);
+                layerParams.blobs.push_back(-1.0f * blob.reshape(1, 1));
             }
         }
         else if (layer_type == "Div")
         {
             Mat blob = getBlob(node_proto, constBlobs, 1);
             CV_Assert_N(blob.type() == CV_32F, blob.total());
-            divide(1.0, blob, blob);
             if (blob.total() == 1)
             {
-                layerParams.set("scale", blob.at<float>(0));
+                layerParams.set("scale", 1.0f / blob.at<float>(0));
                 layerParams.type = "Power";
             }
             else
             {
                 layerParams.type = "Scale";
+                divide(1.0, blob, blob);
                 layerParams.blobs.push_back(blob);
                 layerParams.set("bias_term", false);
             }
......
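Not part of the patch: a small standalone snippet illustrating the reciprocal trick the Div branch relies on. `cv::divide(1.0, blob, blob)` replaces each element of the constant divisor with its reciprocal, so the division `x / blob` can later be executed as a multiplication by a Scale layer; for a scalar divisor the importer now instead folds `1.0f / blob.at<float>(0)` directly into a Power layer's scale.

```cpp
#include <opencv2/core.hpp>
#include <iostream>

int main()
{
    // A constant divisor blob, as the importer would extract it from the ONNX graph.
    cv::Mat blob = (cv::Mat_<float>(1, 3) << 2.f, 4.f, 8.f);

    // Element-wise reciprocal in place: blob becomes {0.5, 0.25, 0.125}.
    cv::divide(1.0, blob, blob);

    std::cout << blob << std::endl;
    return 0;
}
```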
@@ -226,9 +226,9 @@ TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
 TEST_P(DNNTestNetwork, OpenFace)
 {
 #if defined(INF_ENGINE_RELEASE)
-#if INF_ENGINE_RELEASE < 2018030000
+#if (INF_ENGINE_RELEASE < 2018030000 || INF_ENGINE_RELEASE == 2018050000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
+        throw SkipTestException("");
 #elif INF_ENGINE_RELEASE < 2018040000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
         throw SkipTestException("Test is enabled starts from OpenVINO 2018R4");
......
@@ -190,6 +190,14 @@ TEST_P(DNNTestOpenVINO, models)
             modelName == "landmarks-regression-retail-0009" ||
             modelName == "semantic-segmentation-adas-0001")))
         throw SkipTestException("");
+#elif INF_ENGINE_RELEASE == 2018050000
+    if (modelName == "single-image-super-resolution-0063" ||
+        modelName == "single-image-super-resolution-1011" ||
+        modelName == "single-image-super-resolution-1021" ||
+        (target == DNN_TARGET_OPENCL_FP16 && modelName == "face-reidentification-retail-0095") ||
+        (target == DNN_TARGET_MYRIAD && (modelName == "license-plate-recognition-barrier-0001" ||
+                                         modelName == "semantic-segmentation-adas-0001")))
+        throw SkipTestException("");
 #endif
 #endif
......
@@ -295,6 +295,10 @@ TEST_P(Test_Caffe_layers, Eltwise)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
+        throw SkipTestException("Test is disabled for OpenVINO 2018R5");
+#endif
     testLayerUsingCaffeModels("layer_eltwise");
 }
......
@@ -164,6 +164,8 @@ TEST_P(Test_ONNX_layers, MultyInputs)
 TEST_P(Test_ONNX_layers, DynamicReshape)
 {
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
+        throw SkipTestException("");
     testONNXModels("dynamic_reshape");
 }
@@ -249,6 +251,10 @@ TEST_P(Test_ONNX_nets, VGG16)
     else if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL) {
         lInf = 1.2e-4;
     }
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE >= 2018050000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
+        l1 = 0.131;
+#endif
     testONNXModels("vgg16", pb, l1, lInf);
 }
@@ -327,7 +333,7 @@ TEST_P(Test_ONNX_nets, CNN_MNIST)
 TEST_P(Test_ONNX_nets, MobileNet_v2)
 {
     // output range: [-166; 317]
-    const double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.38 : 7e-5;
+    const double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.4 : 7e-5;
     const double lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 2.87 : 5e-4;
     testONNXModels("mobilenetv2", pb, l1, lInf);
 }
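For scale (not part of the diff): the comment above puts this model's outputs in roughly [-166, 317], so raising the FP16/Myriad L1 threshold from 0.38 to 0.4 still amounts to a relative tolerance of only about 0.4 / 483 ≈ 0.08% of the output range.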
@@ -350,7 +356,17 @@ TEST_P(Test_ONNX_nets, LResNet100E_IR)
 TEST_P(Test_ONNX_nets, Emotion_ferplus)
 {
-    testONNXModels("emotion_ferplus", pb);
+    double l1 = default_l1;
+    double lInf = default_lInf;
+    // Output values are in range [-2.01109, 2.11111]
+    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
+        l1 = 0.007;
+    else if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
+    {
+        l1 = 0.021;
+        lInf = 0.034;
+    }
+    testONNXModels("emotion_ferplus", pb, l1, lInf);
 }

 TEST_P(Test_ONNX_nets, Inception_v2)
@@ -371,6 +387,10 @@ TEST_P(Test_ONNX_nets, DenseNet121)
 TEST_P(Test_ONNX_nets, Inception_v1)
 {
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
+        throw SkipTestException("");
+#endif
     testONNXModels("inception_v1", pb);
 }
......
@@ -241,6 +241,10 @@ TEST_P(Test_TensorFlow_layers, unfused_flatten)
 TEST_P(Test_TensorFlow_layers, leaky_relu)
 {
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
+        throw SkipTestException("");
+#endif
     runTensorFlowNet("leaky_relu_order1");
     runTensorFlowNet("leaky_relu_order2");
     runTensorFlowNet("leaky_relu_order3");
@@ -383,6 +387,10 @@ TEST_P(Test_TensorFlow_nets, Faster_RCNN)
 TEST_P(Test_TensorFlow_nets, MobileNet_v1_SSD_PPN)
 {
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
+        throw SkipTestException("Unstable test case");
+#endif
     checkBackend();
     std::string proto = findDataFile("dnn/ssd_mobilenet_v1_ppn_coco.pbtxt", false);
     std::string model = findDataFile("dnn/ssd_mobilenet_v1_ppn_coco.pb", false);
@@ -560,6 +568,10 @@ TEST_P(Test_TensorFlow_layers, slice)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
         (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         throw SkipTestException("");
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
+        throw SkipTestException("");
+#endif
     runTensorFlowNet("slice_4d");
 }
......
@@ -266,9 +266,9 @@ class Test_Torch_nets : public DNNTestLayer {};
 TEST_P(Test_Torch_nets, OpenFace_accuracy)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
+#if defined(INF_ENGINE_RELEASE) && (INF_ENGINE_RELEASE < 2018030000 || INF_ENGINE_RELEASE == 2018050000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
+        throw SkipTestException("");
 #endif
     checkBackend();
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
@@ -389,6 +389,10 @@ TEST_P(Test_Torch_nets, ENet_accuracy)
 // -model models/instance_norm/feathers.t7
 TEST_P(Test_Torch_nets, FastNeuralStyle_accuracy)
 {
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
+        throw SkipTestException("");
+#endif
     checkBackend();
     std::string models[] = {"dnn/fast_neural_style_eccv16_starry_night.t7",
                             "dnn/fast_neural_style_instance_norm_feathers.t7"};
......