// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2017, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "perf_precomp.hpp"
#include "opencv2/core/ocl.hpp"

#include "opencv2/dnn/shape_utils.hpp"

namespace
{

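// Backend/target combinations used to parametrize the tests below;
// the Halide backend is included only when OpenCV is built with HAVE_HALIDE.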
#ifdef HAVE_HALIDE
#define TEST_DNN_BACKEND DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE
#else
#define TEST_DNN_BACKEND DNN_BACKEND_DEFAULT
#endif
#define TEST_DNN_TARGET DNN_TARGET_CPU, DNN_TARGET_OPENCL

CV_ENUM(DNNBackend, DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE)
CV_ENUM(DNNTarget, DNN_TARGET_CPU, DNN_TARGET_OPENCL)

class DNNTestNetwork : public ::perf::TestBaseWithParam< tuple<DNNBackend, DNNTarget> >
{
public:
    dnn::Backend backend;
    dnn::Target target;

    dnn::Net net;

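    // Loads a model from the given framework files, configures the requested
    // backend/target (optionally with a Halide scheduler), reports memory
    // consumption and FLOPs, then times a full forward pass.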
    void processNet(std::string weights, std::string proto, std::string halide_scheduler,
                        const Mat& input, const std::string& outputLayer,
                        const std::string& framework)
    {
        backend = (dnn::Backend)(int)get<0>(GetParam());
        target = (dnn::Target)(int)get<1>(GetParam());

        if (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL)
        {
#if defined(HAVE_OPENCL)
            if (!cv::ocl::useOpenCL())
#endif
            {
                throw ::SkipTestException("OpenCL is not available or is disabled in OpenCV");
            }
        }

        randu(input, 0.0f, 1.0f);

        weights = findDataFile(weights, false);
        if (!proto.empty())
            proto = findDataFile(proto, false);
        if (backend == DNN_BACKEND_HALIDE)
        {
            if (halide_scheduler == "disabled")
                throw ::SkipTestException("Halide test is disabled");
            if (!halide_scheduler.empty())
                halide_scheduler = findDataFile(std::string("dnn/halide_scheduler_") + (target == DNN_TARGET_OPENCL ? "opencl_" : "") + halide_scheduler, true);
        }
        if (framework == "caffe")
        {
            net = cv::dnn::readNetFromCaffe(proto, weights);
        }
        else if (framework == "torch")
        {
            net = cv::dnn::readNetFromTorch(weights);
        }
        else if (framework == "tensorflow")
        {
            net = cv::dnn::readNetFromTensorflow(weights);
        }
        else
            CV_Error(Error::StsNotImplemented, "Unknown framework " + framework);

        net.setInput(blobFromImage(input, 1.0, Size(), Scalar(), false));
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        if (backend == DNN_BACKEND_HALIDE)
        {
            net.setHalideScheduler(halide_scheduler);
        }

        MatShape netInputShape = shape(1, 3, input.rows, input.cols);
        size_t weightsMemory = 0, blobsMemory = 0;
        net.getMemoryConsumption(netInputShape, weightsMemory, blobsMemory);
        int64 flops = net.getFLOPS(netInputShape);

        net.forward(outputLayer); // warmup

        std::cout << "Memory consumption:" << std::endl;
        std::cout << "    Weights(parameters): " << divUp(weightsMemory, 1u<<20) << " MiB" << std::endl;
        std::cout << "    Blobs: " << divUp(blobsMemory, 1u<<20) << " MiB" << std::endl;
        std::cout << "Calculation complexity: " << flops * 1e-9 << " GFlops" << std::endl;

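        // Timed region: a single forward pass over the whole network.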
        PERF_SAMPLE_BEGIN()
            net.forward();
        PERF_SAMPLE_END()

        SANITY_CHECK_NOTHING();
    }
};


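// Per-topology performance tests: each one times a forward pass of the named model.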
PERF_TEST_P_(DNNTestNetwork, AlexNet)
{
    processNet("dnn/bvlc_alexnet.caffemodel", "dnn/bvlc_alexnet.prototxt",
            "alexnet.yml", Mat(cv::Size(227, 227), CV_32FC3), "prob", "caffe");
}

PERF_TEST_P_(DNNTestNetwork, GoogLeNet)
{
    processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
            "", Mat(cv::Size(224, 224), CV_32FC3), "prob", "caffe");
}

PERF_TEST_P_(DNNTestNetwork, ResNet50)
{
    processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
            "resnet_50.yml", Mat(cv::Size(224, 224), CV_32FC3), "prob", "caffe");
}

PERF_TEST_P_(DNNTestNetwork, SqueezeNet_v1_1)
{
    processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
            "squeezenet_v1_1.yml", Mat(cv::Size(227, 227), CV_32FC3), "prob", "caffe");
}

PERF_TEST_P_(DNNTestNetwork, Inception_5h)
{
    processNet("dnn/tensorflow_inception_graph.pb", "",
            "inception_5h.yml",
            Mat(cv::Size(224, 224), CV_32FC3), "softmax2", "tensorflow");
}

PERF_TEST_P_(DNNTestNetwork, ENet)
{
    processNet("dnn/Enet-model-best.net", "", "enet.yml",
            Mat(cv::Size(512, 256), CV_32FC3), "l367_Deconvolution", "torch");
}

PERF_TEST_P_(DNNTestNetwork, SSD)
{
    processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel", "dnn/ssd_vgg16.prototxt", "disabled",
            Mat(cv::Size(300, 300), CV_32FC3), "detection_out", "caffe");
}

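// Run every test above for each available backend paired with each target (CPU, OpenCL).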
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork,
    testing::Combine(
        ::testing::Values(TEST_DNN_BACKEND),
        DNNTarget::all()
    )
);

} // namespace