//   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <string>
#include <vector>
#include <memory>
#include <utility>
#include <ctime>

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

#include "paddle_inference_api.h" // NOLINT

#include "include/preprocess_op.h"
#include "include/config_parser.h"

using namespace paddle_infer;

namespace PaddleDetection {
// Object Detection Result
struct ObjectResult {
  // Rectangle coordinates of detected object: left, right, top, bottom
  std::vector<int> rect;
  // Class id of detected object
  int class_id;
  // Confidence of detected object
  float confidence;
};
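
// A minimal sketch of reading one result (assumes rect stores the corners in
// the order listed above, i.e. {left, right, top, bottom}):
//   const PaddleDetection::ObjectResult& res = results[0];
//   cv::Rect box(res.rect[0], res.rect[2],
//                res.rect[1] - res.rect[0], res.rect[3] - res.rect[2]);
//   printf("class=%d score=%.3f\n", res.class_id, res.confidence);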


// Generate visualization colormap for each class
std::vector<int> GenerateColorMap(int num_class);


// Visualize detection results
cv::Mat VisualizeResult(const cv::Mat& img,
                        const std::vector<ObjectResult>& results,
                        const std::vector<std::string>& label_list,
                        const std::vector<int>& colormap);
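
// Sketch of pairing the two helpers above (assumes `results` came from
// ObjectDetector::Predict and `labels` from ObjectDetector::GetLabelList()):
//   std::vector<int> colormap = GenerateColorMap(labels.size());
//   cv::Mat vis = VisualizeResult(frame, results, labels, colormap);
//   cv::imwrite("vis.jpg", vis);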


class ObjectDetector {
 public:
  explicit ObjectDetector(const std::string& model_dir,
                          bool use_gpu = false,
                          const std::string& run_mode = "fluid",
                          const int gpu_id = 0) {
    config_.load_config(model_dir);
    threshold_ = config_.draw_threshold_;
    image_shape_ = config_.image_shape_;
    preprocessor_.Init(config_.preprocess_info_, image_shape_);
    LoadModel(model_dir, use_gpu, config_.min_subgraph_size_, 1, run_mode, gpu_id);
  }
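
  // Construction sketch (assumes `model_dir` holds an exported inference model
  // with the YAML config read by load_config; the path is illustrative only):
  //   PaddleDetection::ObjectDetector detector("output/inference_model",
  //                                            /*use_gpu=*/false);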

  // Load Paddle inference model
  void LoadModel(
    const std::string& model_dir,
    bool use_gpu,
    const int min_subgraph_size,
    const int batch_size = 1,
    const std::string& run_mode = "fluid",
    const int gpu_id = 0);

  // Run predictor
  void Predict(const cv::Mat& im,
      const double threshold = 0.5,
      const int warmup = 0,
      const int repeats = 1,
      const bool run_benchmark = false,
      std::vector<ObjectResult>* result = nullptr);
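
  // Typical call (sketch; `im` is a BGR image loaded with cv::imread and the
  // remaining arguments keep the defaults above):
  //   std::vector<PaddleDetection::ObjectResult> results;
  //   detector.Predict(im, 0.5, 0, 1, false, &results);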

  // Get the model label list
  const std::vector<std::string>& GetLabelList() const {
    return config_.label_list_;
  }

 private:
  // Preprocess image and copy data to input buffer
  void Preprocess(const cv::Mat& image_mat);
  // Postprocess result
  void Postprocess(
      const cv::Mat& raw_mat,
      std::vector<ObjectResult>* result);

  std::shared_ptr<Predictor> predictor_;
  Preprocessor preprocessor_;
  ImageBlob inputs_;
  std::vector<float> output_data_;
  float threshold_;
  ConfigPaser config_;
  std::vector<int> image_shape_;
};

}  // namespace PaddleDetection