Commit c918ac29 authored by Dmitry Kurtaev

Fix IE tests

Parent eb00dce7
@@ -110,14 +110,25 @@ public:
     virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
     {
 #ifdef HAVE_INF_ENGINE
+        InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
+        CV_Assert(!input->dims.empty());
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5)
-        InferenceEngine::Builder::SplitLayer ieLayer(name);
-        ieLayer.setOutputPorts({InferenceEngine::Port()});
+        InferenceEngine::Builder::Layer ieLayer(name);
+        ieLayer.setName(name);
+        if (preferableTarget == DNN_TARGET_MYRIAD)
+        {
+            ieLayer.setType("Copy");
+        }
+        else
+        {
+            ieLayer.setType("Split");
+            ieLayer.getParameters()["axis"] = input->dims.size() - 1;
+            ieLayer.getParameters()["out_sizes"] = input->dims[0];
+        }
+        ieLayer.setInputPorts(std::vector<InferenceEngine::Port>(1));
+        ieLayer.setOutputPorts(std::vector<InferenceEngine::Port>(1));
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
 #else
-        InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
-        CV_Assert(!input->dims.empty());
         InferenceEngine::LayerParams lp;
         lp.name = name;
         lp.type = "Split";
......
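Note: the hunk above switches the 2018R5 builder path from `InferenceEngine::Builder::SplitLayer` to a generic `Builder::Layer`, configured as a `"Copy"` layer on Myriad and as a `"Split"` layer (with `axis` and `out_sizes` taken from the input dims) on other targets. Below is a minimal sketch, not part of the commit, of exercising such a pass-through layer through the public DNN API; the `"Identity"` type name and the tiny 1x8 blob are assumptions for illustration only.

```cpp
#include <opencv2/dnn.hpp>
#include <iostream>

int main()
{
    cv::dnn::Net net;
    cv::dnn::LayerParams lp;
    lp.name = "blank";
    lp.type = "Identity";  // assumed to resolve to the pass-through (blank) layer
    net.addLayerToPrev(lp.name, lp.type, lp);

    cv::Mat input(1, 8, CV_32F);
    cv::randu(input, cv::Scalar(0), cv::Scalar(1));
    net.setInput(input);
    // net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);  // only in an IE-enabled build

    cv::Mat output = net.forward();
    // A pass-through layer should reproduce the input exactly.
    std::cout << "max abs diff: " << cv::norm(input, output, cv::NORM_INF) << std::endl;
    return 0;
}
```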
@@ -1150,7 +1150,7 @@ public:
 #ifdef HAVE_INF_ENGINE
         if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
         {
-            if (INF_ENGINE_RELEASE == 2018050000 && (adjustPad.height || adjustPad.width))
+            if (INF_ENGINE_RELEASE >= 2018050000 && (adjustPad.height || adjustPad.width))
                 return false;
             const int outGroupCn = blobs[0].size[1];  // Weights are in IOHW layout
......
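The hunk above widens the Deconvolution restriction: with IE 2018R5 and any later release, the backend is rejected whenever the adjust-pad term is non-zero. A rough sketch of where that term enters the transposed-convolution output size, using the usual sizing formula (an assumption for illustration, not code from the patch):

```cpp
#include <iostream>

// Common transposed-convolution sizing:
// out = stride * (in - 1) + kernel - 2 * pad + adjustPad
static int deconvOutSize(int in, int kernel, int stride, int pad, int adjustPad)
{
    return stride * (in - 1) + kernel - 2 * pad + adjustPad;
}

int main()
{
    std::cout << deconvOutSize(7, 4, 2, 1, 0) << std::endl;  // 14
    std::cout << deconvOutSize(7, 4, 2, 1, 1) << std::endl;  // 15: only reachable via the adjust-pad term
    return 0;
}
```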
@@ -236,6 +236,10 @@ TEST_P(Test_Caffe_layers, Dropout)
 TEST_P(Test_Caffe_layers, Concat)
 {
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE > 2018050000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
+        throw SkipTestException("");
+#endif
     testLayerUsingCaffeModels("layer_concat");
     testLayerUsingCaffeModels("layer_concat_optim", true, false);
     testLayerUsingCaffeModels("layer_concat_shared_input", true, false);
......
@@ -395,7 +395,7 @@ TEST_P(Test_ONNX_nets, DenseNet121)
 TEST_P(Test_ONNX_nets, Inception_v1)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE >= 2018050000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("Test is disabled for OpenVINO 2018R5");
 #endif
......
@@ -241,7 +241,7 @@ TEST_P(Test_TensorFlow_layers, unfused_flatten)
 TEST_P(Test_TensorFlow_layers, leaky_relu)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE >= 2018050000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
         throw SkipTestException("");
 #endif
@@ -388,7 +388,7 @@ TEST_P(Test_TensorFlow_nets, Faster_RCNN)
 TEST_P(Test_TensorFlow_nets, MobileNet_v1_SSD_PPN)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE >= 2018050000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         throw SkipTestException("Unstable test case");
 #endif
......
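The test hunks above all follow one pattern: guards are widened from `INF_ENGINE_RELEASE == 2018050000` to `>= 2018050000`, so the skips also cover releases newer than OpenVINO 2018 R5. The constant appears to pack the release as year and major number (2018050000 → 2018 R5). A purely illustrative sketch of the difference between the two guards, with an assumed value standing in for a newer release:

```cpp
#include <cstdio>

// Illustrative only: 2018050000 is the value used for the OpenVINO 2018 R5 release.
#ifndef INF_ENGINE_RELEASE
#define INF_ENGINE_RELEASE 2019010000  // assumed value for a hypothetical later release
#endif

int main()
{
#if INF_ENGINE_RELEASE == 2018050000
    std::printf("guard matches only 2018 R5\n");
#endif
#if INF_ENGINE_RELEASE >= 2018050000
    std::printf("guard matches 2018 R5 and every later release\n");
#endif
    return 0;
}
```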