diff --git a/PaddleCV/PaddleDetection/inference/CMakeLists.txt b/PaddleCV/PaddleDetection/inference/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..ed610da047316d0b08d73d51e0223a06180b4026 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/CMakeLists.txt @@ -0,0 +1,272 @@ +cmake_minimum_required(VERSION 3.0) +project(cpp_inference_demo CXX C) +message("cmake module path: ${CMAKE_MODULE_PATH}") +message("cmake root path: ${CMAKE_ROOT}") +option(WITH_MKL "Compile demo with MKL/OpenBlas support, default use MKL." ON) +option(WITH_GPU "Compile demo with GPU/CPU, default use CPU." ON) +option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." ON) +option(USE_TENSORRT "Compile demo with TensorRT." OFF) + +SET(PADDLE_DIR "" CACHE PATH "Location of libraries") +SET(OPENCV_DIR "" CACHE PATH "Location of libraries") +SET(CUDA_LIB "" CACHE PATH "Location of libraries") + + +include(external-cmake/yaml-cpp.cmake) + +macro(safe_set_static_flag) + foreach(flag_var + CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE + CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO) + if(${flag_var} MATCHES "/MD") + string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") + endif(${flag_var} MATCHES "/MD") + endforeach(flag_var) +endmacro() + +if (WITH_MKL) + ADD_DEFINITIONS(-DUSE_MKL) +endif() + +if (NOT DEFINED PADDLE_DIR OR ${PADDLE_DIR} STREQUAL "") + message(FATAL_ERROR "please set PADDLE_DIR with -DPADDLE_DIR=/path/paddle_inference_dir") +endif() + +if (NOT DEFINED OPENCV_DIR OR ${OPENCV_DIR} STREQUAL "") + message(FATAL_ERROR "please set OPENCV_DIR with -DOPENCV_DIR=/path/opencv") +endif() + +include_directories("${CMAKE_SOURCE_DIR}/") +include_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/src/ext-yaml-cpp/include") +include_directories("${PADDLE_DIR}/") +include_directories("${PADDLE_DIR}/third_party/install/protobuf/include") 
+include_directories("${PADDLE_DIR}/third_party/install/glog/include") +include_directories("${PADDLE_DIR}/third_party/install/gflags/include") +include_directories("${PADDLE_DIR}/third_party/install/xxhash/include") +if (EXISTS "${PADDLE_DIR}/third_party/install/snappy/include") + include_directories("${PADDLE_DIR}/third_party/install/snappy/include") +endif() +if(EXISTS "${PADDLE_DIR}/third_party/install/snappystream/include") + include_directories("${PADDLE_DIR}/third_party/install/snappystream/include") +endif() +include_directories("${PADDLE_DIR}/third_party/install/zlib/include") +include_directories("${PADDLE_DIR}/third_party/boost") +include_directories("${PADDLE_DIR}/third_party/eigen3") + +if (EXISTS "${PADDLE_DIR}/third_party/install/snappy/lib") + link_directories("${PADDLE_DIR}/third_party/install/snappy/lib") +endif() +if(EXISTS "${PADDLE_DIR}/third_party/install/snappystream/lib") + link_directories("${PADDLE_DIR}/third_party/install/snappystream/lib") +endif() + +link_directories("${PADDLE_DIR}/third_party/install/zlib/lib") +link_directories("${PADDLE_DIR}/third_party/install/protobuf/lib") +link_directories("${PADDLE_DIR}/third_party/install/glog/lib") +link_directories("${PADDLE_DIR}/third_party/install/gflags/lib") +link_directories("${PADDLE_DIR}/third_party/install/xxhash/lib") +link_directories("${PADDLE_DIR}/paddle/lib/") +link_directories("${CMAKE_CURRENT_BINARY_DIR}/ext/yaml-cpp/lib") +link_directories("${CMAKE_CURRENT_BINARY_DIR}") +if (WIN32) + include_directories("${PADDLE_DIR}/paddle/fluid/inference") + link_directories("${PADDLE_DIR}/paddle/fluid/inference") + include_directories("${OPENCV_DIR}/build/include") + include_directories("${OPENCV_DIR}/opencv/build/include") + link_directories("${OPENCV_DIR}/build/x64/vc14/lib") +else () + include_directories("${PADDLE_DIR}/paddle/include") + link_directories("${PADDLE_DIR}/paddle/lib") + include_directories("${OPENCV_DIR}/include") + link_directories("${OPENCV_DIR}/lib") +endif () + +if 
(WIN32) + add_definitions("/DGOOGLE_GLOG_DLL_DECL=") + set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} /bigobj /MTd") + set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT") + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd") + set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT") + if (WITH_STATIC_LIB) + safe_set_static_flag() + add_definitions(-DSTATIC_LIB) + endif() +else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2 -std=c++11") + set(CMAKE_STATIC_LIBRARY_PREFIX "") +endif() + +# TODO let users define cuda lib path +if (WITH_GPU) + if (NOT DEFINED CUDA_LIB OR ${CUDA_LIB} STREQUAL "") + message(FATAL_ERROR "please set CUDA_LIB with -DCUDA_LIB=/path/cuda-8.0/lib64") + endif() + if (NOT WIN32) + if (NOT DEFINED CUDNN_LIB) + message(FATAL_ERROR "please set CUDNN_LIB with -DCUDNN_LIB=/path/cudnn_v7.4/cuda/lib64") + endif() + endif(NOT WIN32) +endif() + + +if (NOT WIN32) + if (USE_TENSORRT AND WITH_GPU) + include_directories("${PADDLE_DIR}/third_party/install/tensorrt/include") + link_directories("${PADDLE_DIR}/third_party/install/tensorrt/lib") + endif() +endif(NOT WIN32) + +if (NOT WIN32) + set(NGRAPH_PATH "${PADDLE_DIR}/third_party/install/ngraph") + if(EXISTS ${NGRAPH_PATH}) + include(GNUInstallDirs) + include_directories("${NGRAPH_PATH}/include") + link_directories("${NGRAPH_PATH}/${CMAKE_INSTALL_LIBDIR}") + set(NGRAPH_LIB ${NGRAPH_PATH}/${CMAKE_INSTALL_LIBDIR}/libngraph${CMAKE_SHARED_LIBRARY_SUFFIX}) + endif() +endif() + +if(WITH_MKL) + include_directories("${PADDLE_DIR}/third_party/install/mklml/include") + if (WIN32) + set(MATH_LIB ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.lib + ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.lib) + else () + set(MATH_LIB ${PADDLE_DIR}/third_party/install/mklml/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} + ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5${CMAKE_SHARED_LIBRARY_SUFFIX}) + endif () + set(MKLDNN_PATH "${PADDLE_DIR}/third_party/install/mkldnn") 
+ if(EXISTS ${MKLDNN_PATH}) + include_directories("${MKLDNN_PATH}/include") + if (WIN32) + set(MKLDNN_LIB ${MKLDNN_PATH}/lib/mkldnn.lib) + else () + set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0) + endif () + endif() +else() + set(MATH_LIB ${PADDLE_DIR}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX}) +endif() + +if(WITH_STATIC_LIB) + if (WIN32) + set(DEPS + ${PADDLE_DIR}/paddle/fluid/inference/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX}) + else () + set(DEPS + ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX}) + endif() +else() + if (WIN32) + set(DEPS + ${PADDLE_DIR}/paddle/fluid/inference/libpaddle_fluid${CMAKE_STATIC_LIBRARY_SUFFIX}) + else () + set(DEPS + ${PADDLE_DIR}/paddle/lib/libpaddle_fluid${CMAKE_SHARED_LIBRARY_SUFFIX}) + endif() +endif() + +if (NOT WIN32) + set(EXTERNAL_LIB "-lrt -ldl -lpthread") + set(DEPS ${DEPS} + ${MATH_LIB} ${MKLDNN_LIB} + glog gflags protobuf yaml-cpp z xxhash + ${EXTERNAL_LIB}) + if(EXISTS "${PADDLE_DIR}/third_party/install/snappystream/lib") + set(DEPS ${DEPS} snappystream) + endif() + if (EXISTS "${PADDLE_DIR}/third_party/install/snappy/lib") + set(DEPS ${DEPS} snappy) + endif() +else() + set(DEPS ${DEPS} + ${MATH_LIB} ${MKLDNN_LIB} + opencv_world346 glog libyaml-cppmt gflags_static libprotobuf zlibstatic xxhash ${EXTERNAL_LIB}) + set(DEPS ${DEPS} libcmt shlwapi) + if (EXISTS "${PADDLE_DIR}/third_party/install/snappy/lib") + set(DEPS ${DEPS} snappy) + endif() + if(EXISTS "${PADDLE_DIR}/third_party/install/snappystream/lib") + set(DEPS ${DEPS} snappystream) + endif() +endif(NOT WIN32) + +if(WITH_GPU) + if(NOT WIN32) + if (USE_TENSORRT) + set(DEPS ${DEPS} ${PADDLE_DIR}/third_party/install/tensorrt/lib/libnvinfer${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${PADDLE_DIR}/third_party/install/tensorrt/lib/libnvinfer_plugin${CMAKE_STATIC_LIBRARY_SUFFIX}) + endif() + set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} 
${CUDNN_LIB}/libcudnn${CMAKE_SHARED_LIBRARY_SUFFIX}) + else() + set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX} ) + set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX} ) + set(DEPS ${DEPS} ${CUDA_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX}) + endif() +endif() + +if (NOT WIN32) + set(OPENCV_LIB_DIR ${OPENCV_DIR}/lib) + if(EXISTS "${OPENCV_LIB_DIR}") + message("OPENCV_LIB:" ${OPENCV_LIB_DIR}) + else() + set(OPENCV_LIB_DIR ${OPENCV_DIR}/lib64) + message("OPENCV_LIB:" ${OPENCV_LIB_DIR}) + endif() + + set(OPENCV_3RD_LIB_DIR ${OPENCV_DIR}/share/OpenCV/3rdparty/lib) + if(EXISTS "${OPENCV_3RD_LIB_DIR}") + message("OPENCV_3RD_LIB_DIR:" ${OPENCV_3RD_LIB_DIR}) + else() + set(OPENCV_3RD_LIB_DIR ${OPENCV_DIR}/share/OpenCV/3rdparty/lib64) + message("OPENCV_3RD_LIB_DIR:" ${OPENCV_3RD_LIB_DIR}) + endif() + + set(DEPS ${DEPS} ${OPENCV_LIB_DIR}/libopencv_imgcodecs${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_LIB_DIR}/libopencv_imgproc${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_LIB_DIR}/libopencv_core${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_LIB_DIR}/libopencv_highgui${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/libIlmImf${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/liblibjasper${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/liblibpng${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/liblibtiff${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/libittnotify${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/liblibjpeg-turbo${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/liblibwebp${CMAKE_STATIC_LIBRARY_SUFFIX}) + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/libzlib${CMAKE_STATIC_LIBRARY_SUFFIX}) + if(EXISTS "${OPENCV_3RD_LIB_DIR}/libippiw${CMAKE_STATIC_LIBRARY_SUFFIX}") + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/libippiw${CMAKE_STATIC_LIBRARY_SUFFIX}) 
+ endif() + if(EXISTS "${OPENCV_3RD_LIB_DIR}/libippicv${CMAKE_STATIC_LIBRARY_SUFFIX}") + set(DEPS ${DEPS} ${OPENCV_3RD_LIB_DIR}/libippicv${CMAKE_STATIC_LIBRARY_SUFFIX}) + endif() +endif() +# message(${CMAKE_CXX_FLAGS}) +# set(CMAKE_CXX_FLAGS "-g ${CMAKE_CXX_FLAGS}") + +SET(PADDLESEG_INFERENCE_SRCS preprocessor/preprocessor.cpp + preprocessor/preprocessor_detection.cpp predictor/detection_predictor.cpp + utils/detection_result.pb.cc) + +ADD_LIBRARY(libpaddleseg_inference STATIC ${PADDLESEG_INFERENCE_SRCS}) +target_link_libraries(libpaddleseg_inference ${DEPS}) + +add_executable(detection_demo detection_demo.cpp) + +ADD_DEPENDENCIES(libpaddleseg_inference ext-yaml-cpp) +ADD_DEPENDENCIES(detection_demo ext-yaml-cpp libpaddleseg_inference) +target_link_libraries(detection_demo ${DEPS} libpaddleseg_inference) + +if (WIN32) + add_custom_command(TARGET detection_demo POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./mklml.dll + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./libiomp5md.dll + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./mkldnn.dll + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll + COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_DIR}/third_party/install/mkldnn/lib/mkldnn.dll ./release/mkldnn.dll + ) +endif() + +execute_process(COMMAND cp -r ${CMAKE_SOURCE_DIR}/images ${CMAKE_SOURCE_DIR}/conf ${CMAKE_CURRENT_BINARY_DIR}) diff --git a/PaddleCV/PaddleDetection/inference/LICENSE b/PaddleCV/PaddleDetection/inference/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ 
b/PaddleCV/PaddleDetection/inference/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/PaddleCV/PaddleDetection/inference/README.md b/PaddleCV/PaddleDetection/inference/README.md new file mode 100644 index 0000000000000000000000000000000000000000..302b5fb3818df8c1ca871095ff368129ce1292fd --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/README.md @@ -0,0 +1,171 @@ +# PaddleDetection C++预测部署方案 + +## 本文档结构 + +[1.说明](#1说明) + +[2.主要目录和文件](#2主要目录和文件) + +[3.编译](#3编译) + +[4.预测并可视化结果](#4预测并可视化结果) + + + + +## 1.说明 + +本目录提供一个跨平台的图像检测模型的C++预测部署方案,用户通过一定的配置,加上少量的代码,即可把模型集成到自己的服务中,完成相应的图像检测任务。 + +主要设计的目标包括以下四点: +- 跨平台,支持在 Windows 和 Linux 完成编译、开发和部署 +- 可扩展性,支持用户针对新模型开发自己特殊的数据预处理等逻辑 +- 高性能,除了`PaddlePaddle`自身带来的性能优势,我们还针对图像检测的特点对关键步骤进行了性能优化 +- 支持多种常见的图像检测模型,如YOLOv3, Faster-RCNN, Faster-RCNN+FPN,用户通过少量配置即可加载模型完成常见检测任务 + +## 2.主要目录和文件 + +```bash +deploy +├── detection_demo.cpp # 完成图像检测预测任务C++代码 +│ +├── conf +│ ├── detection_rcnn.yaml #示例faster rcnn 目标检测配置 +│ └── detection_rcnn_fpn.yaml #示例faster rcnn + fpn目标检测配置 +├── images +│ └── detection_rcnn # 示例faster rcnn + fpn目标检测测试图片目录 +├── tools +│ └── vis.py # 示例图像检测结果可视化脚本 +├── docs +│ ├── linux_build.md # Linux 编译指南 +│ ├── windows_vs2015_build.md # windows VS2015编译指南 +│ └── windows_vs2019_build.md # Windows VS2019编译指南 +│ +├── utils # 一些基础公共函数 +│ +├── preprocess # 数据预处理相关代码 +│ +├── predictor # 模型加载和预测相关代码 +│ +├── CMakeList.txt # cmake编译入口文件 +│ +└── external-cmake # 依赖的外部项目cmake(目前仅有yaml-cpp) + +``` + +## 3.编译 +支持在`Windows`和`Linux`平台编译和使用: +- [Linux 编译指南](./docs/linux_build.md) +- [Windows 使用 Visual Studio 2019 Community 编译指南](./docs/windows_vs2019_build.md) +- [Windows 使用 Visual Studio 2015 编译指南](./docs/windows_vs2015_build.md) + +`Windows`上推荐使用最新的`Visual Studio 2019 Community`直接编译`CMake`项目。 + +## 4.预测并可视化结果 + +完成编译后,便生成了需要的可执行文件和链接库。这里以我们基于`faster rcnn`检测模型为例,介绍部署图像检测模型的通用流程。 + +### 1. 
下载模型文件 +我们提供faster rcnn,faster rcnn+fpn模型用于预测coco17数据集,可在以下链接下载:[faster rcnn示例模型下载地址](https://paddleseg.bj.bcebos.com/inference/faster_rcnn_pp50.zip), + [faster rcnn + fpn示例模型下载地址](https://paddleseg.bj.bcebos.com/inference/faster_rcnn_pp50_fpn.zip)。 + +下载并解压,解压后目录结构如下: +``` +faster_rcnn_pp50/ +├── __model__ # 模型文件 +│ +└── __params__ # 参数文件 +``` +解压后把上述目录拷贝到合适的路径: + +**假设**`Windows`系统上,我们模型和参数文件所在路径为`D:\projects\models\faster_rcnn_pp50`。 + +**假设**`Linux`上对应的路径则为`/root/projects/models/faster_rcnn_pp50/`。 + + +### 2. 修改配置 + +`inference`源代码(即本目录)的`conf`目录下提供了示例基于faster rcnn的配置文件`detection_rcnn.yaml`, 相关的字段含义和说明如下: + +```yaml +DEPLOY: + # 是否使用GPU预测 + USE_GPU: 1 + # 模型和参数文件所在目录路径 + MODEL_PATH: "/root/projects/models/faster_rcnn_pp50" + # 模型文件名 + MODEL_FILENAME: "__model__" + # 参数文件名 + PARAMS_FILENAME: "__params__" + # 预测图片的标准输入,尺寸不一致会resize + EVAL_CROP_SIZE: (608, 608) + # resize方式,支持 UNPADDING和RANGE_SCALING + RESIZE_TYPE: "RANGE_SCALING" + # 短边对齐的长度,仅在RANGE_SCALING下有效 + TARGET_SHORT_SIZE : 800 + # 均值 + MEAN: [0.4647, 0.4647, 0.4647] + # 方差 + STD: [0.0834, 0.0834, 0.0834] + # 图片类型, rgb或者rgba + IMAGE_TYPE: "rgb" + # 像素分类数 + NUM_CLASSES: 1 + # 通道数 + CHANNELS : 3 + # 预处理器, 目前提供图像检测的通用处理类DetectionPreProcessor + PRE_PROCESSOR: "DetectionPreProcessor" + # 预测模式,支持 NATIVE 和 ANALYSIS + PREDICTOR_MODE: "ANALYSIS" + # 每次预测的 batch_size + BATCH_SIZE : 3 + # 长边伸缩的最大长度,-1代表无限制。 + RESIZE_MAX_SIZE: 1333 + # 输入的tensor数量。 + FEEDS_SIZE: 3 + +``` +修改字段`MODEL_PATH`的值为你在**上一步**下载并解压的模型文件所放置的目录即可。更多配置文件字段介绍,请参考文档[预测部署方案配置文件说明](./docs/configuration.md)。 + +### 3. 
执行预测 + +在终端中切换到生成的可执行文件所在目录为当前目录(Windows系统为`cmd`)。 + +`Linux` 系统中执行以下命令: +```shell +./detection_demo --conf=conf/detection_rcnn.yaml --input_dir=images/detection_rcnn +``` +`Windows` 中执行以下命令: +```shell +.\detection_demo.exe --conf=conf\detection_rcnn.yaml --input_dir=images\detection_rcnn\ +``` + + +预测使用的两个命令参数说明如下: + +| 参数 | 含义 | +|-------|----------| +| conf | 模型配置的Yaml文件路径 | +| input_dir | 需要预测的图片目录 | + +· +配置文件说明请参考上一步,样例程序会扫描input_dir目录下的所有图片,并为每一张图片生成对应的预测结果,输出到屏幕,并在`X`同一目录下保存到`X.pb文件`(X为对应图片的文件名)。可使用工具脚本vis.py将检测结果可视化。 + +**检测结果可视化** + +运行可视化脚本时,只需输入命令行参数图片路径、检测结果pb文件路径、目标框阈值以及类别-标签映射文件路径即可得到可视化的图片`X.png` (tools目录下提供coco17的类别标签映射文件coco17.json)。 + +```bash +python vis.py --img_path=../build/images/detection_rcnn/000000087038.jpg --img_result_path=../build/images/detection_rcnn/000000087038.jpg.pb --threshold=0.1 --c2l_path=coco17.json +``` + +检测结果(每个图片的结果用空行隔开) + +```原图:``` + +![原图](./demo_images/000000087038.jpg) + +```检测结果图:``` + +![检测结果](./demo_images/000000087038.jpg.png) + diff --git a/PaddleCV/PaddleDetection/inference/conf/detection_rcnn.yaml b/PaddleCV/PaddleDetection/inference/conf/detection_rcnn.yaml new file mode 100644 index 0000000000000000000000000000000000000000..50c23fbb3e53ff159844e65da4ed194e169cffb6 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/conf/detection_rcnn.yaml @@ -0,0 +1,18 @@ +DEPLOY: + USE_GPU: 1 + MODEL_PATH: "/root/projects/models/faster_rcnn_pp50" + MODEL_FILENAME: "__model__" + PARAMS_FILENAME: "__params__" + EVAL_CROP_SIZE: (608, 608) + RESIZE_TYPE: "RANGE_SCALING" + TARGET_SHORT_SIZE : 800 + MEAN: [0.485, 0.456, 0.406] + STD: [0.229, 0.224, 0.225] + IMAGE_TYPE: "rgb" + NUM_CLASSES: 1 + CHANNELS : 3 + PRE_PROCESSOR: "DetectionPreProcessor" + PREDICTOR_MODE: "ANALYSIS" + BATCH_SIZE : 3 + RESIZE_MAX_SIZE: 1333 + FEEDS_SIZE: 3 diff --git a/PaddleCV/PaddleDetection/inference/conf/detection_rcnn_fpn.yaml b/PaddleCV/PaddleDetection/inference/conf/detection_rcnn_fpn.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..9d6635ef8c2b29fb0ca9318d1ec08f1f7be037f7 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/conf/detection_rcnn_fpn.yaml @@ -0,0 +1,19 @@ +DEPLOY: + USE_GPU: 1 + MODEL_PATH: "/root/projects/models/faster_rcnn_pp50_fpn" + MODEL_FILENAME: "__model__" + PARAMS_FILENAME: "__params__" + EVAL_CROP_SIZE: (608, 608) + RESIZE_TYPE: "RANGE_SCALING" + TARGET_SHORT_SIZE : 800 + MEAN: [0.485, 0.456, 0.406] + STD: [0.229, 0.224, 0.225] + IMAGE_TYPE: "rgb" + NUM_CLASSES: 1 + CHANNELS : 3 + PRE_PROCESSOR: "DetectionPreProcessor" + PREDICTOR_MODE: "ANALYSIS" + BATCH_SIZE : 1 + RESIZE_MAX_SIZE: 1333 + FEEDS_SIZE: 3 + COARSEST_STRIDE: 32 diff --git a/PaddleCV/PaddleDetection/inference/demo_images/000000087038.jpg b/PaddleCV/PaddleDetection/inference/demo_images/000000087038.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9f77f5d5f057b6f92dc096da704ecb8dee99bdf5 Binary files /dev/null and b/PaddleCV/PaddleDetection/inference/demo_images/000000087038.jpg differ diff --git a/PaddleCV/PaddleDetection/inference/demo_images/000000087038.jpg.png b/PaddleCV/PaddleDetection/inference/demo_images/000000087038.jpg.png new file mode 100644 index 0000000000000000000000000000000000000000..aa2c63d1c3dd1ca08d517239842ce5bd40310d01 Binary files /dev/null and b/PaddleCV/PaddleDetection/inference/demo_images/000000087038.jpg.png differ diff --git a/PaddleCV/PaddleDetection/inference/detection_demo.cpp b/PaddleCV/PaddleDetection/inference/detection_demo.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7e711ed6970358c528a3198bb6168a871d83d380 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/detection_demo.cpp @@ -0,0 +1,42 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include <glog/logging.h> +#include <utils/utils.h> +#include <predictor/detection_predictor.h> + +DEFINE_string(conf, "", "Configuration File Path"); +DEFINE_string(input_dir, "", "Directory of Input Images"); + +int main(int argc, char** argv) { + // 0. parse args + google::ParseCommandLineFlags(&argc, &argv, true); + if (FLAGS_conf.empty() || FLAGS_input_dir.empty()) { + std::cout << "Usage: ./predictor --conf=/config/path/to/your/model --input_dir=/directory/of/your/input/images"; + return -1; + } + // 1. create a predictor and init it with conf + PaddleSolution::DetectionPredictor predictor; + if (predictor.init(FLAGS_conf) != 0) { + LOG(FATAL) << "Fail to init predictor"; + return -1; + } + + // 2. get all the images with extension '.jpeg' at input_dir + auto imgs = PaddleSolution::utils::get_directory_images(FLAGS_input_dir, ".jpeg|.jpg|.JPEG|.JPG|.bmp|.BMP|.png|.PNG"); + + // 3. 
predict + predictor.predict(imgs); + return 0; +} diff --git a/PaddleCV/PaddleDetection/inference/docs/configuration.md b/PaddleCV/PaddleDetection/inference/docs/configuration.md new file mode 100644 index 0000000000000000000000000000000000000000..cb2f761f792009c1accb52048d6e4f2cdcb6ad29 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/docs/configuration.md @@ -0,0 +1,75 @@ +# 预测部署方案配置文件说明 +## 基本概念 +预测部署方案的配置文件旨在给用户提供一个预测部署方案定制化接口。用户仅需理解该配置文件相关字段的含义,无需编写任何代码,即可定制化预测部署方案。为了更好地表达每个字段的含义,首先介绍配置文件中字段的类型。 + +### 字段类型 +- **required**: 表明该字段必须显式定义,否则无法正常启动预测部署程序。 +- **optional**: 表明该字段可忽略不写,预测部署系统会提供默认值,相关默认值将在下文介绍。 + +### 字段值类型 +- **int**:表明该字段必须赋予整型类型的值。 +- **string**:表明该字段必须赋予字符串类型的值。 +- **list**:表明该字段必须赋予列表的值。 +- **tuple**: 表明该字段必须赋予双元素元组的值。 + +## 字段介绍 + +```yaml +# 预测部署时所有配置字段需在DEPLOY字段下 +DEPLOY: + # 类型:required int + # 含义:是否使用GPU预测。 0:不使用 1:使用 + USE_GPU: 1 + # 类型:required string + # 含义:模型和参数文件所在目录 + MODEL_PATH: "/path/to/model_directory" + # 类型:required string + # 含义:模型文件名 + MODEL_FILENAME: "__model__" + # 类型:required string + # 含义:参数文件名 + PARAMS_FILENAME: "__params__" + # 类型:optional string + # 含义:图像resize的类型。支持 UNPADDING 和 RANGE_SCALING模式。默认是UNPADDING模式。 + RESIZE_TYPE: "UNPADDING" + # 类型:required tuple + # 含义:当使用UNPADDING模式时,会将图像直接resize到该尺寸。 + EVAL_CROP_SIZE: (513, 513) + # 类型:optional int + # 含义:当使用RANGE_SCALING模式时,图像短边需要对齐该字段的值,长边会同比例 + # 的缩放,从而在保持图像长宽比例不变的情况下resize到新的尺寸。默认值为0。 + TARGET_SHORT_SIZE: 800 + # 类型:optional int + # 含义: 当使用RANGE_SCALING模式时,长边不能缩放到比该字段的值大。默认值为0。 + RESIZE_MAX_SIZE: 1333 + # 类型:required list + # 含义:图像进行归一化预处理时的均值 + MEAN: [104.008, 116.669, 122.675] + # 类型:required list + # 含义:图像进行归一化预处理时的方差 + STD: [1.0, 1.0, 1.0] + # 类型:string + # 含义:图片类型, rgb 或者 rgba + IMAGE_TYPE: "rgb" + # 类型:required int + # 含义:图像分类类型数 + NUM_CLASSES: 2 + # 类型:required int + # 含义:图片通道数 + CHANNELS : 3 + # 类型:required string + # 含义:预处理方式,目前提供图像检测的通用预处理类DetectionPreProcessor. 
+ PRE_PROCESSOR: "DetectionPreProcessor" + # 类型:required string + # 含义:预测模式,支持 NATIVE 和 ANALYSIS + PREDICTOR_MODE: "ANALYSIS" + # 类型:required int + # 含义:每次预测的 batch_size + BATCH_SIZE : 3 + # 类型:optional int + # 含义: 输入张量的个数。大部分模型不需要设置。 默认值为1. + FEEDS_SIZE: 2 + # 类型: optional int + # 含义: 将图像的边变为该字段的值的整数倍。默认值为1。 + COARSEST_STRIDE: 32 +``` \ No newline at end of file diff --git a/PaddleCV/PaddleDetection/inference/docs/linux_build.md b/PaddleCV/PaddleDetection/inference/docs/linux_build.md new file mode 100644 index 0000000000000000000000000000000000000000..2ad9e46383123efee47b941f97c8e7690c7b95d6 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/docs/linux_build.md @@ -0,0 +1,84 @@ +# Linux平台 编译指南 + +## 说明 +本文档在 `Linux`平台使用`GCC 4.8.5` 和 `GCC 4.9.4`测试过,如果需要使用更高G++版本编译使用,则需要重新编译Paddle预测库,请参考: [从源码编译Paddle预测库](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_usage/deploy/inference/build_and_install_lib_cn.html#id15)。 + +## 前置条件 +* G++ 4.8.2 ~ 4.9.4 +* CUDA 8.0/ CUDA 9.0 +* CMake 3.0+ + +请确保系统已经安装好上述基本软件,**下面所有示例以工作目录为 `/root/projects/`演示**。 + +### Step1: 下载代码 + +1. `mkdir -p /root/projects/paddle_models && cd /root/projects/paddle_models` +2. `git clone https://github.com/PaddlePaddle/models.git` + +`C++`预测代码在`/root/projects/paddle_models/models/PaddleCV/PaddleDetection/inference` 目录,该目录不依赖任何`PaddleDetection`下其他目录。 + + +### Step2: 下载PaddlePaddle C++ 预测库 fluid_inference + +目前仅支持`CUDA 8` 和 `CUDA 9`,请点击 [PaddlePaddle预测库下载地址](https://www.paddlepaddle.org.cn/documentation/docs/zh/develop/advanced_usage/deploy/inference/build_and_install_lib_cn.html)下载对应的版本(develop版本)。 + + +下载并解压后`/root/projects/fluid_inference`目录包含内容为: +``` +fluid_inference +├── paddle # paddle核心库和头文件 +| +├── third_party # 第三方依赖库和头文件 +| +└── version.txt # 版本和编译信息 +``` + +### Step3: 安装配置OpenCV + +```shell +# 0. 切换到/root/projects目录 +cd /root/projects +# 1. 下载OpenCV3.4.6版本源代码 +wget -c https://paddleseg.bj.bcebos.com/inference/opencv-3.4.6.zip +# 2. 
解压 +unzip opencv-3.4.6.zip && cd opencv-3.4.6 +# 3. 创建build目录并编译, 这里安装到/usr/local/opencv3目录 +mkdir build && cd build +cmake .. -DCMAKE_INSTALL_PREFIX=/root/projects/opencv3 -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=OFF -DWITH_IPP=OFF -DBUILD_IPP_IW=OFF -DWITH_LAPACK=OFF -DWITH_EIGEN=OFF -DCMAKE_INSTALL_LIBDIR=lib64 -DWITH_ZLIB=ON -DBUILD_ZLIB=ON -DWITH_JPEG=ON -DBUILD_JPEG=ON -DWITH_PNG=ON -DBUILD_PNG=ON -DWITH_TIFF=ON -DBUILD_TIFF=ON +make -j4 +make install +``` + +**注意:** 上述操作完成后,`opencv` 被安装在 `/root/projects/opencv3` 目录。 + +### Step4: 编译 + +`CMake`编译时,涉及到四个编译参数用于指定核心依赖库的路径, 他们的定义如下: + +| 参数名 | 含义 | +| ---- | ---- | +| CUDA_LIB | cuda的库路径 | +| CUDNN_LIB | cuDnn的库路径| +| OPENCV_DIR | OpenCV的安装路径, | +| PADDLE_DIR | Paddle预测库的路径 | + +执行下列操作时,**注意**把对应的参数改为你的上述依赖库实际路径: + +```shell +cd /root/projects/paddle_models/models/PaddleCV/PaddleDetection/inference + +mkdir build && cd build +cmake .. -DWITH_GPU=ON -DPADDLE_DIR=/root/projects/fluid_inference -DCUDA_LIB=/usr/local/cuda/lib64/ -DOPENCV_DIR=/root/projects/opencv3/ -DCUDNN_LIB=/usr/local/cuda/lib64/ +make +``` + + +### Step5: 预测及可视化 + +执行命令: + +``` +./detection_demo --conf=/path/to/your/conf --input_dir=/path/to/your/input/data/directory +``` + +更详细说明请参考ReadMe文档: [预测和可视化部分](../README.md) diff --git a/PaddleCV/PaddleDetection/inference/docs/windows_vs2015_build.md b/PaddleCV/PaddleDetection/inference/docs/windows_vs2015_build.md new file mode 100644 index 0000000000000000000000000000000000000000..be1c0289d404c17e561928173b104228ea63dbda --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/docs/windows_vs2015_build.md @@ -0,0 +1,97 @@ +# Windows平台使用 Visual Studio 2015 编译指南 + +本文档步骤,我们同时在`Visual Studio 2015` 和 `Visual Studio 2019 Community` 两个版本进行了测试,我们推荐使用[`Visual Studio 2019`直接编译`CMake`项目](./windows_vs2019_build.md)。 + + +## 前置条件 +* Visual Studio 2015 +* CUDA 8.0/ CUDA 9.0 +* CMake 3.0+ + +请确保系统已经安装好上述基本软件,**下面所有示例以工作目录为 `D:\projects`演示**。 + +### Step1: 下载代码 + +1. 打开`cmd`, 执行 `cd D:\projects\paddle_models` +2. 
`git clone https://github.com/PaddlePaddle/models.git` + +`C++`预测库代码在`D:\projects\paddle_models\models\PaddleCV\PaddleDetection\inference` 目录,该目录不依赖任何`PaddleDetection`下其他目录。 + + +### Step2: 下载PaddlePaddle C++ 预测库 fluid_inference + +根据Windows环境,下载相应版本的PaddlePaddle预测库,并解压到`D:\projects\`目录 + +| CUDA | GPU | 下载地址 | +|------|------|--------| +| 8.0 | Yes | [fluid_inference.zip](https://bj.bcebos.com/v1/paddleseg/fluid_inference_win.zip) | +| 9.0 | Yes | [fluid_inference_cuda90.zip](https://paddleseg.bj.bcebos.com/fluid_inference_cuda9_cudnn7.zip) | + +解压后`D:\projects\fluid_inference`目录包含内容为: +``` +fluid_inference +├── paddle # paddle核心库和头文件 +| +├── third_party # 第三方依赖库和头文件 +| +└── version.txt # 版本和编译信息 +``` + +### Step3: 安装配置OpenCV + +1. 在OpenCV官网下载适用于Windows平台的3.4.6版本, [下载地址](https://sourceforge.net/projects/opencvlibrary/files/3.4.6/opencv-3.4.6-vc14_vc15.exe/download) +2. 运行下载的可执行文件,将OpenCV解压至指定目录,如`D:\projects\opencv` +3. 配置环境变量,如下流程所示 + - 我的电脑->属性->高级系统设置->环境变量 + - 在系统变量中找到Path(如没有,自行创建),并双击编辑 + - 新建,将opencv路径填入并保存,如`D:\projects\opencv\build\x64\vc14\bin` + +### Step4: 以VS2015为例编译代码 + +以下命令需根据自己系统中各相关依赖的路径进行修改 + +* 调用VS2015, 请根据实际VS安装路径进行调整,打开cmd命令行工具执行以下命令 +* 其他vs版本(比如vs2019),请查找到对应版本的`vcvarsall.bat`路径,替换本命令即可 + +``` +call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64 +``` + +* CMAKE编译工程 + * PADDLE_DIR: fluid_inference预测库路径 + * CUDA_LIB: CUDA动态库目录, 请根据实际安装情况调整 + * OPENCV_DIR: OpenCV解压目录 + +``` +# 切换到预测库所在目录 +cd /d D:\projects\paddle_models\models\PaddleCV\PaddleDetection\inference +# 创建构建目录, 重新构建只需要删除该目录即可 +mkdir build +cd build +# cmake构建VS项目 +D:\projects\paddle_models\models\PaddleCV\PaddleDetection\inference\build> cmake .. 
-G "Visual Studio 14 2015 Win64" -DWITH_GPU=ON -DPADDLE_DIR=D:\projects\fluid_inference -DCUDA_LIB=D:\projects\cudalib\v9.0\lib\x64 -DOPENCV_DIR=D:\projects\opencv -T host=x64 +``` + +这里的`cmake`参数`-G`, 表示生成对应的VS版本的工程,可以根据自己的`VS`版本调整,具体请参考[cmake文档](https://cmake.org/cmake/help/v3.15/manual/cmake-generators.7.html) + +* 生成可执行文件 + +``` +D:\projects\paddle_models\models\PaddleCV\PaddleDetection\inference\build> msbuild /m /p:Configuration=Release cpp_inference_demo.sln +``` + +### Step5: 预测及可视化 + +上述`Visual Studio 2015`编译产出的可执行文件在`build\release`目录下,切换到该目录: +``` +cd /d D:\projects\paddle_models\models\PaddleCV\PaddleDetection\inference\build\release +``` + +之后执行命令: + +``` +detection_demo.exe --conf=/path/to/your/conf --input_dir=/path/to/your/input/data/directory +``` + +更详细说明请参考ReadMe文档: [预测和可视化部分](../README.md) + diff --git a/PaddleCV/PaddleDetection/inference/docs/windows_vs2019_build.md b/PaddleCV/PaddleDetection/inference/docs/windows_vs2019_build.md new file mode 100644 index 0000000000000000000000000000000000000000..f3f589a9a246e494439b26f516ea319c270ff9ab --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/docs/windows_vs2019_build.md @@ -0,0 +1,102 @@ +# Visual Studio 2019 Community CMake 编译指南 + +Windows 平台下,我们使用`Visual Studio 2015` 和 `Visual Studio 2019 Community` 进行了测试。微软从`Visual Studio 2017`开始即支持直接管理`CMake`跨平台编译项目,但是直到`2019`才提供了稳定和完全的支持,所以如果你想使用CMake管理项目编译构建,我们推荐你使用`Visual Studio 2019`环境下构建。 + +你也可以使用和`VS2015`一样,通过把`CMake`项目转化成`VS`项目来编译,其中**有差别的部分**在文档中我们有说明,请参考:[使用Visual Studio 2015 编译指南](./windows_vs2015_build.md) + +## 前置条件 +* Visual Studio 2019 +* CUDA 8.0/ CUDA 9.0 +* CMake 3.0+ + +请确保系统已经安装好上述基本软件,我们使用的是`VS2019`的社区版。 + +**下面所有示例以工作目录为 `D:\projects`演示**。 + +### Step1: 下载代码 + +1. 点击下载源代码:[下载地址](https://github.com/PaddlePaddle/models/archive/develop.zip) +2. 
解压,解压后目录重命名为`paddle_models` + +以下代码目录路径为`D:\projects\paddle_models` 为例。 + + +### Step2: 下载PaddlePaddle C++ 预测库 fluid_inference + +根据Windows环境,下载相应版本的PaddlePaddle预测库,并解压到`D:\projects\`目录 + +| CUDA | GPU | 下载地址 | +|------|------|--------| +| 8.0 | Yes | [fluid_inference.zip](https://bj.bcebos.com/v1/paddleseg/fluid_inference_win.zip) | +| 9.0 | Yes | [fluid_inference_cuda90.zip](https://paddleseg.bj.bcebos.com/fluid_inference_cuda9_cudnn7.zip) | + +解压后`D:\projects\fluid_inference`目录包含内容为: +``` +fluid_inference +├── paddle # paddle核心库和头文件 +| +├── third_party # 第三方依赖库和头文件 +| +└── version.txt # 版本和编译信息 +``` +**注意:** `CUDA90`版本解压后目录名称为`fluid_inference_cuda90`。 + +### Step3: 安装配置OpenCV + +1. 在OpenCV官网下载适用于Windows平台的3.4.6版本, [下载地址](https://sourceforge.net/projects/opencvlibrary/files/3.4.6/opencv-3.4.6-vc14_vc15.exe/download) +2. 运行下载的可执行文件,将OpenCV解压至指定目录,如`D:\projects\opencv` +3. 配置环境变量,如下流程所示 + - 我的电脑->属性->高级系统设置->环境变量 + - 在系统变量中找到Path(如没有,自行创建),并双击编辑 + - 新建,将opencv路径填入并保存,如`D:\projects\opencv\build\x64\vc14\bin` + +### Step4: 使用Visual Studio 2019直接编译CMake + +1. 打开Visual Studio 2019 Community,点击`继续但无需代码` +![step2](https://paddleseg.bj.bcebos.com/inference/vs2019_step1.png) +2. 点击: `文件`->`打开`->`CMake` +![step2.1](https://paddleseg.bj.bcebos.com/inference/vs2019_step2.png) + +选择项目代码所在路径,并打开`CMakeList.txt`: + +![step2.2](https://paddleseg.bj.bcebos.com/inference/vs2019_step3.png) + +3. 点击:`项目`->`cpp_inference_demo的CMake设置` + +![step3](https://paddleseg.bj.bcebos.com/inference/vs2019_step4.png) + +4. 点击`浏览`,分别设置编译选项指定`CUDA`、`OpenCV`、`Paddle预测库`的路径 + +![step4](https://paddleseg.bj.bcebos.com/inference/vs2019_step5.png) + +三个编译参数的含义说明如下: + +| 参数名 | 含义 | +| ---- | ---- | +| CUDA_LIB | cuda的库路径 | +| OPENCV_DIR | OpenCV的安装路径, | +| PADDLE_DIR | Paddle预测库的路径 | + +**设置完成后**, 点击上图中`保存并生成CMake缓存以加载变量`。 + +5. 
点击`生成`->`全部生成` + +![step6](https://paddleseg.bj.bcebos.com/inference/vs2019_step6.png) + + +### Step5: 预测及可视化 + +上述`Visual Studio 2019`编译产出的可执行文件在`out\build\x64-Release`目录下,打开`cmd`,并切换到该目录: + +``` +cd D:\projects\paddle_models\models\PaddleCV\PaddleDetection\inference\build\x64-Release +``` + +之后执行命令: + +``` +detection_demo.exe --conf=/path/to/your/conf --input_dir=/path/to/your/input/data/directory +``` + +更详细说明请参考ReadMe文档: [预测和可视化部分](../README.md) + diff --git a/PaddleCV/PaddleDetection/inference/external-cmake/yaml-cpp.cmake b/PaddleCV/PaddleDetection/inference/external-cmake/yaml-cpp.cmake new file mode 100644 index 0000000000000000000000000000000000000000..15fa2674e00d85f1db7bbdfdceeebadaf0eabf5a --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/external-cmake/yaml-cpp.cmake @@ -0,0 +1,29 @@ + +find_package(Git REQUIRED) + +include(ExternalProject) + +message("${CMAKE_BUILD_TYPE}") + +ExternalProject_Add( + ext-yaml-cpp + GIT_REPOSITORY https://github.com/jbeder/yaml-cpp.git + GIT_TAG e0e01d53c27ffee6c86153fa41e7f5e57d3e5c90 + CMAKE_ARGS + -DYAML_CPP_BUILD_TESTS=OFF + -DYAML_CPP_BUILD_TOOLS=OFF + -DYAML_CPP_INSTALL=OFF + -DYAML_CPP_BUILD_CONTRIB=OFF + -DMSVC_SHARED_RT=OFF + -DBUILD_SHARED_LIBS=OFF + -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} + -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS} + -DCMAKE_CXX_FLAGS_DEBUG=${CMAKE_CXX_FLAGS_DEBUG} + -DCMAKE_CXX_FLAGS_RELEASE=${CMAKE_CXX_FLAGS_RELEASE} + -DCMAKE_LIBRARY_OUTPUT_DIRECTORY=${CMAKE_BINARY_DIR}/ext/yaml-cpp/lib + -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY=${CMAKE_BINARY_DIR}/ext/yaml-cpp/lib + PREFIX "${CMAKE_BINARY_DIR}/ext/yaml-cpp" + # Disable install step + INSTALL_COMMAND "" + LOG_DOWNLOAD ON +) diff --git a/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000014439.jpg b/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000014439.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0abbdab06eb5950b93908cc91adfa640e8a3ac78 Binary files /dev/null and 
b/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000014439.jpg differ diff --git a/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000087038.jpg b/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000087038.jpg new file mode 100644 index 0000000000000000000000000000000000000000..9f77f5d5f057b6f92dc096da704ecb8dee99bdf5 Binary files /dev/null and b/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000087038.jpg differ diff --git a/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000570688.jpg b/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000570688.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cb304bd56c4010c08611a30dcca58ea9140cea54 Binary files /dev/null and b/PaddleCV/PaddleDetection/inference/images/detection_rcnn/000000570688.jpg differ diff --git a/PaddleCV/PaddleDetection/inference/predictor/detection_predictor.cpp b/PaddleCV/PaddleDetection/inference/predictor/detection_predictor.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ba07e3b6c7fb2152bd7825950a3cd94769f36adc --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/predictor/detection_predictor.cpp @@ -0,0 +1,383 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "detection_predictor.h" +#include +#include +#include +#include "utils/detection_result.pb.h" + +namespace PaddleSolution { + /* lod_buffer: every item in lod_buffer is an image matrix after preprocessing + * input_buffer: same data with lod_buffer after flattening to 1-D vector and padding, needed to be empty before using this function + */ + void padding_minibatch(const std::vector> &lod_buffer, std::vector &input_buffer, + std::vector &resize_heights, std::vector &resize_widths, int channels, int coarsest_stride = 1) { + int batch_size = lod_buffer.size(); + int max_h = -1; + int max_w = -1; + for(int i = 0; i < batch_size; ++i) { + max_h = (max_h > resize_heights[i])? max_h:resize_heights[i]; + max_w = (max_w > resize_widths[i])? max_w:resize_widths[i]; + } + max_h = static_cast(ceil(static_cast(max_h) / static_cast(coarsest_stride)) * coarsest_stride); + max_w = static_cast(ceil(static_cast(max_w) / static_cast(coarsest_stride)) * coarsest_stride); + std::cout << "max_w: " << max_w << " max_h: " << max_h << std::endl; + input_buffer.insert(input_buffer.end(), batch_size * channels * max_h * max_w, 0); + // flatten tensor and padding + for(int i = 0; i < lod_buffer.size(); ++i) { + float *input_buffer_ptr = input_buffer.data() + i * channels * max_h * max_w; + const float *lod_ptr = lod_buffer[i].data(); + for(int c = 0; c < channels; ++c) { + for(int h = 0; h < resize_heights[i]; ++h) { + memcpy(input_buffer_ptr, lod_ptr, resize_widths[i] * sizeof(float)); + lod_ptr += resize_widths[i]; + input_buffer_ptr += max_w; + } + input_buffer_ptr += (max_h - resize_heights[i]) * max_w; + } + } + // change resize w, h + for(int i = 0; i < batch_size; ++i){ + resize_widths[i] = max_w; + resize_heights[i] = max_h; + } + } + + void output_detection_result(const float* out_addr, const std::vector> &lod_vector, const std::vector &imgs_batch){ + for(int i = 0; i < lod_vector[0].size() - 1; ++i) { + DetectionResult detection_result; + 
detection_result.set_filename(imgs_batch[i]); + std::cout << imgs_batch[i] << ":" << std::endl; + for (int j = lod_vector[0][i]; j < lod_vector[0][i+1]; ++j) { + DetectionBox *box_ptr = detection_result.add_detection_boxes(); + box_ptr->set_class_(static_cast(round(out_addr[0 + j * 6]))); + box_ptr->set_score(out_addr[1 + j * 6]); + box_ptr->set_left_top_x(out_addr[2 + j * 6]); + box_ptr->set_left_top_y(out_addr[3 + j * 6]); + box_ptr->set_right_bottom_x(out_addr[4 + j * 6]); + box_ptr->set_right_bottom_y(out_addr[5 + j * 6]); + printf("Class %d, score = %f, left top = [%f, %f], right bottom = [%f, %f]\n", + static_cast(round(out_addr[0 + j * 6])), out_addr[1 + j * 6], out_addr[2 + j * 6], + out_addr[3 + j * 6], out_addr[4 + j * 6], out_addr[5 + j * 6]); + } + printf("\n"); + std::ofstream output(imgs_batch[i] + ".pb", std::ios::out | std::ios::trunc | std::ios::binary); + detection_result.SerializeToOstream(&output); + output.close(); + } + } + + int DetectionPredictor::init(const std::string& conf) { + if (!_model_config.load_config(conf)) { + LOG(FATAL) << "Fail to load config file: [" << conf << "]"; + return -1; + } + _preprocessor = PaddleSolution::create_processor(conf); + if (_preprocessor == nullptr) { + LOG(FATAL) << "Failed to create_processor"; + return -1; + } + + bool use_gpu = _model_config._use_gpu; + const auto& model_dir = _model_config._model_path; + const auto& model_filename = _model_config._model_file_name; + const auto& params_filename = _model_config._param_file_name; + + // load paddle model file + if (_model_config._predictor_mode == "NATIVE") { + paddle::NativeConfig config; + auto prog_file = utils::path_join(model_dir, model_filename); + auto param_file = utils::path_join(model_dir, params_filename); + config.prog_file = prog_file; + config.param_file = param_file; + config.fraction_of_gpu_memory = 0; + config.use_gpu = use_gpu; + config.device = 0; + _main_predictor = paddle::CreatePaddlePredictor(config); + } else if 
(_model_config._predictor_mode == "ANALYSIS") { + paddle::AnalysisConfig config; + if (use_gpu) { + config.EnableUseGpu(100, 0); + } + auto prog_file = utils::path_join(model_dir, model_filename); + auto param_file = utils::path_join(model_dir, params_filename); + config.SetModel(prog_file, param_file); + config.SwitchUseFeedFetchOps(false); + config.SwitchSpecifyInputNames(true); + config.EnableMemoryOptim(); + _main_predictor = paddle::CreatePaddlePredictor(config); + } else { + return -1; + } + return 0; + + } + + int DetectionPredictor::predict(const std::vector& imgs) { + if (_model_config._predictor_mode == "NATIVE") { + return native_predict(imgs); + } + else if (_model_config._predictor_mode == "ANALYSIS") { + return analysis_predict(imgs); + } + return -1; + } + + int DetectionPredictor::native_predict(const std::vector& imgs) { + int config_batch_size = _model_config._batch_size; + + int channels = _model_config._channels; + int eval_width = _model_config._resize[0]; + int eval_height = _model_config._resize[1]; + std::size_t total_size = imgs.size(); + int default_batch_size = std::min(config_batch_size, (int)total_size); + int batch = total_size / default_batch_size + ((total_size % default_batch_size) != 0); + int batch_buffer_size = default_batch_size * channels * eval_width * eval_height; + + auto& input_buffer = _buffer; + auto& imgs_batch = _imgs_batch; + float sr; + // DetectionResultsContainer result_container; + for (int u = 0; u < batch; ++u) { + int batch_size = default_batch_size; + if (u == (batch - 1) && (total_size % default_batch_size)) { + batch_size = total_size % default_batch_size; + } + + int real_buffer_size = batch_size * channels * eval_width * eval_height; + std::vector feeds; + input_buffer.clear(); + imgs_batch.clear(); + for (int i = 0; i < batch_size; ++i) { + int idx = u * default_batch_size + i; + imgs_batch.push_back(imgs[idx]); + } + std::vector ori_widths; + std::vector ori_heights; + std::vector resize_widths; + 
std::vector resize_heights; + std::vector scale_ratios; + ori_widths.resize(batch_size); + ori_heights.resize(batch_size); + resize_widths.resize(batch_size); + resize_heights.resize(batch_size); + scale_ratios.resize(batch_size); + std::vector> lod_buffer(batch_size); + if (!_preprocessor->batch_process(imgs_batch, lod_buffer, ori_widths.data(), ori_heights.data(), + resize_widths.data(), resize_heights.data(), scale_ratios.data())) { + return -1; + } + // flatten and padding + padding_minibatch(lod_buffer, input_buffer, resize_heights, resize_widths, channels, _model_config._coarsest_stride); + paddle::PaddleTensor im_tensor, im_size_tensor, im_info_tensor; + + im_tensor.name = "image"; + im_tensor.shape = std::vector({ batch_size, channels, resize_heights[0], resize_widths[0] }); + im_tensor.data.Reset(input_buffer.data(), input_buffer.size() * sizeof(float)); + im_tensor.dtype = paddle::PaddleDType::FLOAT32; + + std::vector image_infos; + for(int i = 0; i < batch_size; ++i) { + image_infos.push_back(resize_heights[i]); + image_infos.push_back(resize_widths[i]); + image_infos.push_back(scale_ratios[i]); + } + im_info_tensor.name = "info"; + im_info_tensor.shape = std::vector({batch_size, 3}); + im_info_tensor.data.Reset(image_infos.data(), batch_size * 3 * sizeof(float)); + im_info_tensor.dtype = paddle::PaddleDType::FLOAT32; + + std::vector image_size; + for(int i = 0; i < batch_size; ++i) { + image_size.push_back(ori_heights[i]); + image_size.push_back(ori_widths[i]); + } + + std::vector image_size_f; + for(int i = 0; i < batch_size; ++i) { + image_size_f.push_back(ori_heights[i]); + image_size_f.push_back(ori_widths[i]); + image_size_f.push_back(1.0); + } + + int feeds_size = _model_config._feeds_size; + im_size_tensor.name = "im_size"; + if(feeds_size == 2) { + im_size_tensor.shape = std::vector({ batch_size, 2}); + im_size_tensor.data.Reset(image_size.data(), batch_size * 2 * sizeof(int)); + im_size_tensor.dtype = paddle::PaddleDType::INT32; + } + else 
if(feeds_size == 3) { + im_size_tensor.shape = std::vector({ batch_size, 3}); + im_size_tensor.data.Reset(image_size_f.data(), batch_size * 3 * sizeof(float)); + im_size_tensor.dtype = paddle::PaddleDType::FLOAT32; + } + std::cout << "Feed size = " << feeds_size << std::endl; + feeds.push_back(im_tensor); + if(_model_config._feeds_size > 2) { + feeds.push_back(im_info_tensor); + } + feeds.push_back(im_size_tensor); + _outputs.clear(); + + auto t1 = std::chrono::high_resolution_clock::now(); + if (!_main_predictor->Run(feeds, &_outputs, batch_size)) { + LOG(ERROR) << "Failed: NativePredictor->Run() return false at batch: " << u; + continue; + } + auto t2 = std::chrono::high_resolution_clock::now(); + auto duration = std::chrono::duration_cast(t2 - t1).count(); + std::cout << "runtime = " << duration << std::endl; + std::cout << "Number of outputs:" << _outputs.size() << std::endl; + int out_num = 1; + // print shape of first output tensor for debugging + std::cout << "size of outputs[" << 0 << "]: ("; + for (int j = 0; j < _outputs[0].shape.size(); ++j) { + out_num *= _outputs[0].shape[j]; + std::cout << _outputs[0].shape[j] << ","; + } + std::cout << ")" << std::endl; + + // const size_t nums = _outputs.front().data.length() / sizeof(float); + // if (out_num % batch_size != 0 || out_num != nums) { + // LOG(ERROR) << "outputs data size mismatch with shape size."; + // return -1; + // } + float* out_addr = (float *)(_outputs[0].data.data()); + output_detection_result(out_addr, _outputs[0].lod, imgs_batch); + } + return 0; + } + + int DetectionPredictor::analysis_predict(const std::vector& imgs) { + + int config_batch_size = _model_config._batch_size; + int channels = _model_config._channels; + int eval_width = _model_config._resize[0]; + int eval_height = _model_config._resize[1]; + auto total_size = imgs.size(); + int default_batch_size = std::min(config_batch_size, (int)total_size); + int batch = total_size / default_batch_size + ((total_size % default_batch_size) 
!= 0); + int batch_buffer_size = default_batch_size * channels * eval_width * eval_height; + + auto& input_buffer = _buffer; + auto& imgs_batch = _imgs_batch; + //DetectionResultsContainer result_container; + for (int u = 0; u < batch; ++u) { + int batch_size = default_batch_size; + if (u == (batch - 1) && (total_size % default_batch_size)) { + batch_size = total_size % default_batch_size; + } + + int real_buffer_size = batch_size * channels * eval_width * eval_height; + std::vector feeds; + //input_buffer.resize(real_buffer_size); + input_buffer.clear(); + imgs_batch.clear(); + for (int i = 0; i < batch_size; ++i) { + int idx = u * default_batch_size + i; + imgs_batch.push_back(imgs[idx]); + } + + std::vector ori_widths; + std::vector ori_heights; + std::vector resize_widths; + std::vector resize_heights; + std::vector scale_ratios; + ori_widths.resize(batch_size); + ori_heights.resize(batch_size); + resize_widths.resize(batch_size); + resize_heights.resize(batch_size); + scale_ratios.resize(batch_size); + + std::vector> lod_buffer(batch_size); + if (!_preprocessor->batch_process(imgs_batch, lod_buffer, ori_widths.data(), ori_heights.data(), + resize_widths.data(), resize_heights.data(), scale_ratios.data())){ + std::cout << "Failed to preprocess!" 
<< std::endl; + return -1; + } + + //flatten tensor + padding_minibatch(lod_buffer, input_buffer, resize_heights, resize_widths, channels, _model_config._coarsest_stride); + + std::vector input_names = _main_predictor->GetInputNames(); + auto im_tensor = _main_predictor->GetInputTensor(input_names.front()); + im_tensor->Reshape({ batch_size, channels, resize_heights[0], resize_widths[0] }); + im_tensor->copy_from_cpu(input_buffer.data()); + + if(input_names.size() > 2){ + std::vector image_infos; + for(int i = 0; i < batch_size; ++i) { + image_infos.push_back(resize_heights[i]); + image_infos.push_back(resize_widths[i]); + image_infos.push_back(scale_ratios[i]); + } + auto im_info_tensor = _main_predictor->GetInputTensor(input_names[1]); + im_info_tensor->Reshape({batch_size, 3}); + im_info_tensor->copy_from_cpu(image_infos.data()); + } + + std::vector image_size; + for(int i = 0; i < batch_size; ++i) { + image_size.push_back(ori_heights[i]); + image_size.push_back(ori_widths[i]); + } + std::vector image_size_f; + for(int i = 0; i < batch_size; ++i) { + image_size_f.push_back(static_cast(ori_heights[i])); + image_size_f.push_back(static_cast(ori_widths[i])); + image_size_f.push_back(1.0); + } + + auto im_size_tensor = _main_predictor->GetInputTensor(input_names.back()); + if(input_names.size() > 2) { + im_size_tensor->Reshape({batch_size, 3}); + im_size_tensor->copy_from_cpu(image_size_f.data()); + } + else{ + im_size_tensor->Reshape({batch_size, 2}); + im_size_tensor->copy_from_cpu(image_size.data()); + } + + + auto t1 = std::chrono::high_resolution_clock::now(); + _main_predictor->ZeroCopyRun(); + auto t2 = std::chrono::high_resolution_clock::now(); + auto duration = std::chrono::duration_cast(t2 - t1).count(); + std::cout << "runtime = " << duration << std::endl; + + auto output_names = _main_predictor->GetOutputNames(); + auto output_t = _main_predictor->GetOutputTensor(output_names[0]); + std::vector out_data; + std::vector output_shape = output_t->shape(); + 
+ int out_num = 1; + std::cout << "size of outputs[" << 0 << "]: ("; + for (int j = 0; j < output_shape.size(); ++j) { + out_num *= output_shape[j]; + std::cout << output_shape[j] << ","; + } + std::cout << ")" << std::endl; + + out_data.resize(out_num); + output_t->copy_to_cpu(out_data.data()); + + float* out_addr = (float *)(out_data.data()); + auto lod_vector = output_t->lod(); + output_detection_result(out_addr, lod_vector, imgs_batch); + } + return 0; + } +} diff --git a/PaddleCV/PaddleDetection/inference/predictor/detection_predictor.h b/PaddleCV/PaddleDetection/inference/predictor/detection_predictor.h new file mode 100644 index 0000000000000000000000000000000000000000..3bc4cfdd793291d7d89342c7fbccfdd558d1f004 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/predictor/detection_predictor.h @@ -0,0 +1,52 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace PaddleSolution { + class DetectionPredictor { + public: + // init a predictor with a yaml config file + int init(const std::string& conf); + // predict api + int predict(const std::vector& imgs); + + private: + int native_predict(const std::vector& imgs); + int analysis_predict(const std::vector& imgs); + private: + std::vector _buffer; + std::vector _imgs_batch; + std::vector _outputs; + + PaddleSolution::PaddleModelConfigPaser _model_config; + std::shared_ptr _preprocessor; + std::unique_ptr _main_predictor; + }; +} diff --git a/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor.cpp b/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor.cpp new file mode 100644 index 0000000000000000000000000000000000000000..dbe7bcf624b649c02297bddd593d173b57550f17 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor.cpp @@ -0,0 +1,43 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include + +#include "preprocessor.h" +#include "preprocessor_detection.h" + +namespace PaddleSolution { + + std::shared_ptr create_processor(const std::string& conf_file) { + + auto config = std::make_shared(); + if (!config->load_config(conf_file)) { + LOG(FATAL) << "fail to laod conf file [" << conf_file << "]"; + return nullptr; + } + + if (config->_pre_processor == "DetectionPreProcessor") { + auto p = std::make_shared(); + if (!p->init(config)) { + return nullptr; + } + return p; + } + + + LOG(FATAL) << "unknown processor_name [" << config->_pre_processor << "]"; + + return nullptr; + } +} diff --git a/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor.h b/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor.h new file mode 100644 index 0000000000000000000000000000000000000000..a3fb2e029c8acf92010a258dd2824b85a0f2f90f --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor.h @@ -0,0 +1,64 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once +#include +#include +#include + +#include +#include +#include + +#include "utils/conf_parser.h" + +namespace PaddleSolution { + +class ImagePreProcessor { +protected: + ImagePreProcessor() {}; + +public: + virtual ~ImagePreProcessor() {} + + virtual bool single_process(const std::string& fname, float* data, int* ori_w, int* ori_h) { + return true; + } + + virtual bool batch_process(const std::vector& imgs, float* data, int* ori_w, int* ori_h) { + return true; + } + + virtual bool single_process(const std::string& fname, float* data) { + return true; + } + + virtual bool batch_process(const std::vector& imgs, float* data) { + return true; + } + + virtual bool single_process(const std::string& fname, std::vector &data, int* ori_w, int* ori_h, int* resize_w, int* resize_h, float* scale_ratio) { + return true; + } + + virtual bool batch_process(const std::vector& imgs, std::vector> &data, int* ori_w, int* ori_h, int* resize_w, int* resize_h, float* scale_ratio) { + return true; + } + +}; // end of class ImagePreProcessor + +std::shared_ptr create_processor(const std::string &config_file); + +} // end of namespace paddle_solution + diff --git a/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor_detection.cpp b/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor_detection.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ba8fd0e328c5a859e2d4b88adba0e56e5e3a7476 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor_detection.cpp @@ -0,0 +1,130 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include + +#include + +#include "preprocessor_detection.h" +#include "utils/utils.h" + +namespace PaddleSolution { + bool DetectionPreProcessor::single_process(const std::string& fname, std::vector &vec_data, int* ori_w, int* ori_h, int* resize_w, int* resize_h, float* scale_ratio) { + cv::Mat im1 = cv::imread(fname, -1); + cv::Mat im; + if(_config->_feeds_size == 3) { // faster rcnn + im1.convertTo(im, CV_32FC3, 1/255.0); + } + else if(_config->_feeds_size == 2){ //yolo v3 + im = im1; + } + if (im.data == nullptr || im.empty()) { + LOG(ERROR) << "Failed to open image: " << fname; + return false; + } + + int channels = im.channels(); + if (channels == 1) { + cv::cvtColor(im, im, cv::COLOR_GRAY2BGR); + } + channels = im.channels(); + if (channels != 3 && channels != 4) { + LOG(ERROR) << "Only support rgb(gray) and rgba image."; + return false; + } + *ori_w = im.cols; + *ori_h = im.rows; + cv::cvtColor(im, im, cv::COLOR_BGR2RGB); + //channels = im.channels(); + + //resize + int rw = im.cols; + int rh = im.rows; + float im_scale_ratio; + utils::scaling(_config->_resize_type, rw, rh, _config->_resize[0], _config->_resize[1], _config->_target_short_size, _config->_resize_max_size, im_scale_ratio); + cv::Size resize_size(rw, rh); + *resize_w = rw; + *resize_h = rh; + *scale_ratio = im_scale_ratio; + if (*ori_h != rh || *ori_w != rw) { + cv::Mat im_temp; + if(_config->_resize_type == utils::SCALE_TYPE::UNPADDING) { + cv::resize(im, im_temp, resize_size, 0, 0, cv::INTER_LINEAR); + } + else if(_config->_resize_type == 
utils::SCALE_TYPE::RANGE_SCALING) { + cv::resize(im, im_temp, cv::Size(), im_scale_ratio, im_scale_ratio, cv::INTER_LINEAR); + } + im = im_temp; + } + + vec_data.resize(channels * rw * rh); + float *data = vec_data.data(); + + float* pmean = _config->_mean.data(); + float* pscale = _config->_std.data(); + for (int h = 0; h < rh; ++h) { + const uchar* uptr = im.ptr(h); + const float* fptr = im.ptr(h); + int im_index = 0; + for (int w = 0; w < rw; ++w) { + for (int c = 0; c < channels; ++c) { + int top_index = (c * rh + h) * rw + w; + float pixel;// = static_cast(fptr[im_index]);// / 255.0; + if(_config->_feeds_size == 2){ //yolo v3 + pixel = static_cast(uptr[im_index++]) / 255.0; + } + else if(_config->_feeds_size == 3){ + pixel = fptr[im_index++]; + } + pixel = (pixel - pmean[c]) / pscale[c]; + data[top_index] = pixel; + } + } + } + return true; + } + + bool DetectionPreProcessor::batch_process(const std::vector& imgs, std::vector> &data, int* ori_w, int* ori_h, int* resize_w, int* resize_h, float* scale_ratio) { + auto ic = _config->_channels; + auto iw = _config->_resize[0]; + auto ih = _config->_resize[1]; + std::vector threads; + for (int i = 0; i < imgs.size(); ++i) { + std::string path = imgs[i]; + int* width = &ori_w[i]; + int* height = &ori_h[i]; + int* resize_width = &resize_w[i]; + int* resize_height = &resize_h[i]; + float* sr = &scale_ratio[i]; + threads.emplace_back([this, &data, i, path, width, height, resize_width, resize_height, sr] { + std::vector buffer; + single_process(path, buffer, width, height, resize_width, resize_height, sr); + data[i] = buffer; + }); + } + for (auto& t : threads) { + if (t.joinable()) { + t.join(); + } + } + return true; + } + + bool DetectionPreProcessor::init(std::shared_ptr config) { + _config = config; + return true; + } + +} diff --git a/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor_detection.h b/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor_detection.h new file mode 100644 index 
0000000000000000000000000000000000000000..731329040423756151a2590d3ed0f46b2800191d --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/preprocessor/preprocessor_detection.h @@ -0,0 +1,36 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "preprocessor.h" + +namespace PaddleSolution { + + class DetectionPreProcessor : public ImagePreProcessor { + + public: + DetectionPreProcessor() : _config(nullptr) { + }; + + bool init(std::shared_ptr config); + + bool single_process(const std::string& fname, std::vector &data, int* ori_w, int* ori_h, int* resize_w, int* resize_h, float* scale_ratio); + + bool batch_process(const std::vector& imgs, std::vector> &data, int* ori_w, int* ori_h, int* resize_w, int* resize_h, float* scale_ratio); + private: + std::shared_ptr _config; + }; + +} diff --git a/PaddleCV/PaddleDetection/inference/tools/coco17.json b/PaddleCV/PaddleDetection/inference/tools/coco17.json new file mode 100644 index 0000000000000000000000000000000000000000..d3bbbaad038534baacf6f86f78db5d32bce16238 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/tools/coco17.json @@ -0,0 +1,83 @@ +{ + "0" : "background", + "1" : "person", + "2" : "bicycle", + "3" : "car", + "4" : "motorcycle", + "5" : "airplane", + "6" : "bus", + "7" : "train", + "8" : "truck", + "9" : "boat", + "10" : "traffic light", + "11" : "fire hydrant", + "12" : "stop sign", + "13" : "parking 
meter", + "14" : "bench", + "15" : "bird", + "16" : "cat", + "17" : "dog", + "18" : "horse", + "19" : "sheep", + "20" : "cow", + "21" : "elephant", + "22" : "bear", + "23" : "zebra", + "24" : "giraffe", + "25" : "backpack", + "26" : "umbrella", + "27" : "handbag", + "28" : "tie", + "29" : "suitcase", + "30" : "frisbee", + "31" : "skis", + "32" : "snowboard", + "33" : "sports ball", + "34" : "kite", + "35" : "baseball bat", + "36" : "baseball glove", + "37" : "skateboard", + "38" : "surfboard", + "39" : "tennis racket", + "40" : "bottle", + "41" : "wine glass", + "42" : "cup", + "43" : "fork", + "44" : "knife", + "45" : "spoon", + "46" : "bowl", + "47" : "banana", + "48" : "apple", + "49" : "sandwich", + "50" : "orange", + "51" : "broccoli", + "52" : "carrot", + "53" : "hot dog", + "54" : "pizza", + "55" : "donut", + "56" : "cake", + "57" : "chair", + "58" : "couch", + "59" : "potted plant", + "60" : "bed", + "61" : "dining table", + "62" : "toilet", + "63" : "tv", + "64" : "laptop", + "65" : "mouse", + "66" : "remote", + "67" : "keyboard", + "68" : "cell phone", + "69" : "microwave", + "70" : "oven", + "71" : "toaster", + "72" : "sink", + "73" : "refrigerator", + "74" : "book", + "75" : "clock", + "76" : "vase", + "77" : "scissors", + "78" : "teddy bear", + "79" : "hair drier", + "80" : "toothbrush" +} diff --git a/PaddleCV/PaddleDetection/inference/tools/detection_result_pb2.py b/PaddleCV/PaddleDetection/inference/tools/detection_result_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..3dc66f368b6fea72f70d6a5685b19f23e8021d51 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/tools/detection_result_pb2.py @@ -0,0 +1,151 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: detection_result.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='detection_result.proto', + package='PaddleSolution', + syntax='proto2', + serialized_pb=_b('\n\x16\x64\x65tection_result.proto\x12\x0ePaddleSolution\"\x84\x01\n\x0c\x44\x65tectionBox\x12\r\n\x05\x63lass\x18\x01 \x01(\x05\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x12\n\nleft_top_x\x18\x03 \x01(\x02\x12\x12\n\nleft_top_y\x18\x04 \x01(\x02\x12\x16\n\x0eright_bottom_x\x18\x05 \x01(\x02\x12\x16\n\x0eright_bottom_y\x18\x06 \x01(\x02\"Z\n\x0f\x44\x65tectionResult\x12\x10\n\x08\x66ilename\x18\x01 \x01(\t\x12\x35\n\x0f\x64\x65tection_boxes\x18\x02 \x03(\x0b\x32\x1c.PaddleSolution.DetectionBox') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_DETECTIONBOX = _descriptor.Descriptor( + name='DetectionBox', + full_name='PaddleSolution.DetectionBox', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='class', full_name='PaddleSolution.DetectionBox.class', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='score', full_name='PaddleSolution.DetectionBox.score', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='left_top_x', full_name='PaddleSolution.DetectionBox.left_top_x', index=2, + number=3, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='left_top_y', full_name='PaddleSolution.DetectionBox.left_top_y', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='right_bottom_x', full_name='PaddleSolution.DetectionBox.right_bottom_x', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='right_bottom_y', full_name='PaddleSolution.DetectionBox.right_bottom_y', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=43, + serialized_end=175, +) + + +_DETECTIONRESULT = _descriptor.Descriptor( + name='DetectionResult', + full_name='PaddleSolution.DetectionResult', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='filename', full_name='PaddleSolution.DetectionResult.filename', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='detection_boxes', full_name='PaddleSolution.DetectionResult.detection_boxes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=177, + serialized_end=267, +) + +_DETECTIONRESULT.fields_by_name['detection_boxes'].message_type = _DETECTIONBOX +DESCRIPTOR.message_types_by_name['DetectionBox'] = _DETECTIONBOX +DESCRIPTOR.message_types_by_name['DetectionResult'] = _DETECTIONRESULT + +DetectionBox = _reflection.GeneratedProtocolMessageType('DetectionBox', (_message.Message,), dict( + DESCRIPTOR = _DETECTIONBOX, + __module__ = 'detection_result_pb2' + # @@protoc_insertion_point(class_scope:PaddleSolution.DetectionBox) + )) +_sym_db.RegisterMessage(DetectionBox) + +DetectionResult = _reflection.GeneratedProtocolMessageType('DetectionResult', (_message.Message,), dict( + DESCRIPTOR = _DETECTIONRESULT, + __module__ = 'detection_result_pb2' + # @@protoc_insertion_point(class_scope:PaddleSolution.DetectionResult) + )) +_sym_db.RegisterMessage(DetectionResult) + + +# @@protoc_insertion_point(module_scope) diff --git a/PaddleCV/PaddleDetection/inference/tools/vis.py b/PaddleCV/PaddleDetection/inference/tools/vis.py new file mode 100644 index 0000000000000000000000000000000000000000..1ca13bfbaf48669a78bf94344d378c37fe071f1a --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/tools/vis.py @@ -0,0 +1,104 @@ +# coding: utf-8 +# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import detection_result_pb2 +import cv2 +import sys +import gflags +import numpy as np +import json +from PIL import Image, ImageDraw, ImageFont + +Flags = gflags.FLAGS +gflags.DEFINE_string('img_path', 'abc', 'image path') +gflags.DEFINE_string('img_result_path', 'def', 'image result path') +gflags.DEFINE_float('threshold', 0.0, 'threshold of score') +gflags.DEFINE_string('c2l_path', 'ghk', 'class to label path') + +def colormap(rgb=False): + """ + Get colormap + """ + color_list = np.array([ + 0.000, 0.447, 0.741, 0.850, 0.325, 0.098, 0.929, 0.694, 0.125, 0.494, + 0.184, 0.556, 0.466, 0.674, 0.188, 0.301, 0.745, 0.933, 0.635, 0.078, + 0.184, 0.300, 0.300, 0.300, 0.600, 0.600, 0.600, 1.000, 0.000, 0.000, + 1.000, 0.500, 0.000, 0.749, 0.749, 0.000, 0.000, 1.000, 0.000, 0.000, + 0.000, 1.000, 0.667, 0.000, 1.000, 0.333, 0.333, 0.000, 0.333, 0.667, + 0.000, 0.333, 1.000, 0.000, 0.667, 0.333, 0.000, 0.667, 0.667, 0.000, + 0.667, 1.000, 0.000, 1.000, 0.333, 0.000, 1.000, 0.667, 0.000, 1.000, + 1.000, 0.000, 0.000, 0.333, 0.500, 0.000, 0.667, 0.500, 0.000, 1.000, + 0.500, 0.333, 0.000, 0.500, 0.333, 0.333, 0.500, 0.333, 0.667, 0.500, + 0.333, 1.000, 0.500, 0.667, 0.000, 0.500, 0.667, 0.333, 0.500, 0.667, + 0.667, 0.500, 0.667, 1.000, 0.500, 1.000, 0.000, 0.500, 1.000, 0.333, + 0.500, 1.000, 0.667, 0.500, 1.000, 1.000, 0.500, 0.000, 0.333, 1.000, + 0.000, 0.667, 1.000, 0.000, 1.000, 1.000, 0.333, 0.000, 1.000, 0.333, + 0.333, 1.000, 0.333, 0.667, 1.000, 0.333, 1.000, 1.000, 0.667, 0.000, + 1.000, 0.667, 0.333, 1.000, 0.667, 0.667, 1.000, 0.667, 1.000, 1.000, + 
1.000, 0.000, 1.000, 1.000, 0.333, 1.000, 1.000, 0.667, 1.000, 0.167, + 0.000, 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, + 0.000, 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000, + 0.000, 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000, + 0.833, 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.167, 0.000, 0.000, + 0.333, 0.000, 0.000, 0.500, 0.000, 0.000, 0.667, 0.000, 0.000, 0.833, + 0.000, 0.000, 1.000, 0.000, 0.000, 0.000, 0.143, 0.143, 0.143, 0.286, + 0.286, 0.286, 0.429, 0.429, 0.429, 0.571, 0.571, 0.571, 0.714, 0.714, + 0.714, 0.857, 0.857, 0.857, 1.000, 1.000, 1.000 + ]).astype(np.float32) + color_list = color_list.reshape((-1, 3)) * 255 + if not rgb: + color_list = color_list[:, ::-1] + return color_list + +if __name__ == "__main__": + if len(sys.argv) != 5: + print("Usage: python vis.py --img_path=/path/to/image --img_result_path=/path/to/image_result.pb --threshold=0.1 --c2l_path=/path/to/class2label.json") + else: + Flags(sys.argv) + color_list = colormap(rgb=True) + text_thickness = 1 + text_scale = 0.3 + with open(Flags.img_result_path, "rb") as f: + detection_result = detection_result_pb2.DetectionResult() + detection_result.ParseFromString(f.read()) + img = cv2.imread(Flags.img_path) + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + class2LabelMap = dict() + with open(Flags.c2l_path, "r", encoding="utf-8") as json_f: + class2LabelMap = json.load(json_f) + for box in detection_result.detection_boxes: + if box.score >= Flags.threshold: + box_class = getattr(box, 'class') + text_class_score_str = "%s %.2f" % (class2LabelMap.get(str(box_class)), box.score) + text_point = (int(box.left_top_x), int(box.left_top_y)) + + ptLeftTop = (int(box.left_top_x), int(box.left_top_y)) + ptRightBottom = (int(box.right_bottom_x), int(box.right_bottom_y)) + box_thickness = 1 + color = tuple([int(c) for c in color_list[box_class]]) + cv2.rectangle(img, ptLeftTop, ptRightBottom, color, box_thickness, 8) + if text_point[1] < 0: + 
text_point = (int(box.left_top_x), int(box.right_bottom_y)) + WHITE = (255, 255, 255) + font = cv2.FONT_HERSHEY_SIMPLEX + text_size = cv2.getTextSize(text_class_score_str, font, text_scale, text_thickness) + + text_box_left_top = (text_point[0], text_point[1] - text_size[0][1]) + text_box_right_bottom = (text_point[0] + text_size[0][0], text_point[1]) + + cv2.rectangle(img, text_box_left_top, text_box_right_bottom, color, -1, 8) + cv2.putText(img, text_class_score_str, text_point, font, text_scale, WHITE, text_thickness) + img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) + cv2.imwrite(Flags.img_path + ".png", img) diff --git a/PaddleCV/PaddleDetection/inference/utils/conf_parser.h b/PaddleCV/PaddleDetection/inference/utils/conf_parser.h new file mode 100644 index 0000000000000000000000000000000000000000..21944d032b2c24cdb584dc076a696560d4665ea1 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/utils/conf_parser.h @@ -0,0 +1,237 @@ +// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once +#include +#include +#include +#include + +#include +namespace PaddleSolution { + + class PaddleModelConfigPaser { + std::map _scaling_map; + public: + PaddleModelConfigPaser() + :_class_num(0), + _channels(0), + _use_gpu(0), + _batch_size(1), + _target_short_size(0), + _model_file_name("__model__"), + _param_file_name("__params__"), + _scaling_map{{"UNPADDING", 0}, + {"RANGE_SCALING",1}}, + _feeds_size(1), + _coarsest_stride(1) + { + } + ~PaddleModelConfigPaser() { + } + + void reset() { + _crop_size.clear(); + _resize.clear(); + _mean.clear(); + _std.clear(); + _img_type.clear(); + _class_num = 0; + _channels = 0; + _use_gpu = 0; + _target_short_size = 0; + _batch_size = 1; + _model_file_name = "__model__"; + _model_path = "./"; + _param_file_name="__params__"; + _resize_type = 0; + _resize_max_size = 0; + _feeds_size = 1; + _coarsest_stride = 1; + } + + std::string process_parenthesis(const std::string& str) { + if (str.size() < 2) { + return str; + } + std::string nstr(str); + if (str[0] == '(' && str.back() == ')') { + nstr[0] = '['; + nstr[str.size() - 1] = ']'; + } + return nstr; + } + + template + std::vector parse_str_to_vec(const std::string& str) { + std::vector data; + auto node = YAML::Load(str); + for (const auto& item : node) { + data.push_back(item.as()); + } + return data; + } + + bool load_config(const std::string& conf_file) { + + reset(); + + YAML::Node config = YAML::LoadFile(conf_file); + // 1. get resize + auto str = config["DEPLOY"]["EVAL_CROP_SIZE"].as(); + _resize = parse_str_to_vec(process_parenthesis(str)); + + // 0. get crop_size + if(config["DEPLOY"]["CROP_SIZE"].IsDefined()) { + auto crop_str = config["DEPLOY"]["CROP_SIZE"].as(); + _crop_size = parse_str_to_vec(process_parenthesis(crop_str)); + } + else { + _crop_size = _resize; + } + + // 2. get mean + for (const auto& item : config["DEPLOY"]["MEAN"]) { + _mean.push_back(item.as()); + } + + // 3. 
get std + for (const auto& item : config["DEPLOY"]["STD"]) { + _std.push_back(item.as()); + } + + // 4. get image type + _img_type = config["DEPLOY"]["IMAGE_TYPE"].as(); + // 5. get class number + _class_num = config["DEPLOY"]["NUM_CLASSES"].as(); + // 7. set model path + _model_path = config["DEPLOY"]["MODEL_PATH"].as(); + // 8. get model file_name + _model_file_name = config["DEPLOY"]["MODEL_FILENAME"].as(); + // 9. get model param file name + _param_file_name = config["DEPLOY"]["PARAMS_FILENAME"].as(); + // 10. get pre_processor + _pre_processor = config["DEPLOY"]["PRE_PROCESSOR"].as(); + // 11. use_gpu + _use_gpu = config["DEPLOY"]["USE_GPU"].as(); + // 12. predictor_mode + _predictor_mode = config["DEPLOY"]["PREDICTOR_MODE"].as(); + // 13. batch_size + _batch_size = config["DEPLOY"]["BATCH_SIZE"].as(); + // 14. channels + _channels = config["DEPLOY"]["CHANNELS"].as(); + // 15. target_short_size + if(config["DEPLOY"]["TARGET_SHORT_SIZE"].IsDefined()) { + _target_short_size = config["DEPLOY"]["TARGET_SHORT_SIZE"].as(); + } + // 16.resize_type + if(config["DEPLOY"]["RESIZE_TYPE"].IsDefined() && + _scaling_map.find(config["DEPLOY"]["RESIZE_TYPE"].as()) != _scaling_map.end()) { + _resize_type = _scaling_map[config["DEPLOY"]["RESIZE_TYPE"].as()]; + } + else{ + _resize_type = 0; + } + // 17.resize_max_size + if(config["DEPLOY"]["RESIZE_MAX_SIZE"].IsDefined()) { + _resize_max_size = config["DEPLOY"]["RESIZE_MAX_SIZE"].as(); + } + // 18.feeds_size + if(config["DEPLOY"]["FEEDS_SIZE"].IsDefined()){ + _feeds_size = config["DEPLOY"]["FEEDS_SIZE"].as(); + } + // 19. 
coarsest_stride + if(config["DEPLOY"]["COARSEST_STRIDE"].IsDefined()) { + _coarsest_stride = config["DEPLOY"]["COARSEST_STRIDE"].as(); + } + return true; + } + + void debug() const { + + std::cout << "SCALE_RESIZE: (" << _resize[0] << ", " << _resize[1] << ")" << std::endl; + + std::cout << "MEAN: ["; + for (int i = 0; i < _mean.size(); ++i) { + if (i != _mean.size() - 1) { + std::cout << _mean[i] << ", "; + } else { + std::cout << _mean[i]; + } + } + std::cout << "]" << std::endl; + + std::cout << "STD: ["; + for (int i = 0; i < _std.size(); ++i) { + if (i != _std.size() - 1) { + std::cout << _std[i] << ", "; + } + else { + std::cout << _std[i]; + } + } + std::cout << "]" << std::endl; + std::cout << "DEPLOY.TARGET_SHORT_SIZE: " << _target_short_size << std::endl; + std::cout << "DEPLOY.IMAGE_TYPE: " << _img_type << std::endl; + std::cout << "DEPLOY.NUM_CLASSES: " << _class_num << std::endl; + std::cout << "DEPLOY.CHANNELS: " << _channels << std::endl; + std::cout << "DEPLOY.MODEL_PATH: " << _model_path << std::endl; + std::cout << "DEPLOY.MODEL_FILENAME: " << _model_file_name << std::endl; + std::cout << "DEPLOY.PARAMS_FILENAME: " << _param_file_name << std::endl; + std::cout << "DEPLOY.PRE_PROCESSOR: " << _pre_processor << std::endl; + std::cout << "DEPLOY.USE_GPU: " << _use_gpu << std::endl; + std::cout << "DEPLOY.PREDICTOR_MODE: " << _predictor_mode << std::endl; + std::cout << "DEPLOY.BATCH_SIZE: " << _batch_size << std::endl; + } + //DEPLOY.COARSEST_STRIDE + int _coarsest_stride; + // DEPLOY.FEEDS_SIZE + int _feeds_size; + // DEPLOY.RESIZE_TYPE 0:unpadding 1:rangescaling Default:0 + int _resize_type; + // DEPLOY.RESIZE_MAX_SIZE + int _resize_max_size; + // DEPLOY.CROP_SIZE + std::vector _crop_size; + // DEPLOY.SCALE_RESIZE + std::vector _resize; + // DEPLOY.MEAN + std::vector _mean; + // DEPLOY.STD + std::vector _std; + // DEPLOY.IMAGE_TYPE + std::string _img_type; + // DEPLOY.TARGET_SHORT_SIZE + int _target_short_size; + // DEPLOY.NUM_CLASSES + int 
_class_num; + // DEPLOY.CHANNELS + int _channels; + // DEPLOY.MODEL_PATH + std::string _model_path; + // DEPLOY.MODEL_FILENAME + std::string _model_file_name; + // DEPLOY.PARAMS_FILENAME + std::string _param_file_name; + // DEPLOY.PRE_PROCESSOR + std::string _pre_processor; + // DEPLOY.USE_GPU + int _use_gpu; + // DEPLOY.PREDICTOR_MODE + std::string _predictor_mode; + // DEPLOY.BATCH_SIZE + int _batch_size; + }; + +} diff --git a/PaddleCV/PaddleDetection/inference/utils/detection_result.pb.cc b/PaddleCV/PaddleDetection/inference/utils/detection_result.pb.cc new file mode 100644 index 0000000000000000000000000000000000000000..b5cce7317914cf93f99d0d4efa3aee763972cc4e --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/utils/detection_result.pb.cc @@ -0,0 +1,1159 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: detection_result.proto + +#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION +#include "detection_result.pb.h" + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +// @@protoc_insertion_point(includes) + +namespace PaddleSolution { + +namespace { + +const ::google::protobuf::Descriptor* DetectionBox_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + DetectionBox_reflection_ = NULL; +const ::google::protobuf::Descriptor* DetectionResult_descriptor_ = NULL; +const ::google::protobuf::internal::GeneratedMessageReflection* + DetectionResult_reflection_ = NULL; + +} // namespace + + +void protobuf_AssignDesc_detection_5fresult_2eproto() GOOGLE_ATTRIBUTE_COLD; +void protobuf_AssignDesc_detection_5fresult_2eproto() { + protobuf_AddDesc_detection_5fresult_2eproto(); + const ::google::protobuf::FileDescriptor* file = + ::google::protobuf::DescriptorPool::generated_pool()->FindFileByName( + "detection_result.proto"); + GOOGLE_CHECK(file != NULL); + DetectionBox_descriptor_ = file->message_type(0); + static const int DetectionBox_offsets_[6] = { + 
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionBox, class__), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionBox, score_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionBox, left_top_x_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionBox, left_top_y_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionBox, right_bottom_x_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionBox, right_bottom_y_), + }; + DetectionBox_reflection_ = + ::google::protobuf::internal::GeneratedMessageReflection::NewGeneratedMessageReflection( + DetectionBox_descriptor_, + DetectionBox::internal_default_instance(), + DetectionBox_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionBox, _has_bits_), + -1, + -1, + sizeof(DetectionBox), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionBox, _internal_metadata_)); + DetectionResult_descriptor_ = file->message_type(1); + static const int DetectionResult_offsets_[2] = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionResult, filename_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionResult, detection_boxes_), + }; + DetectionResult_reflection_ = + ::google::protobuf::internal::GeneratedMessageReflection::NewGeneratedMessageReflection( + DetectionResult_descriptor_, + DetectionResult::internal_default_instance(), + DetectionResult_offsets_, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionResult, _has_bits_), + -1, + -1, + sizeof(DetectionResult), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DetectionResult, _internal_metadata_)); +} + +namespace { + +GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_); +void protobuf_AssignDescriptorsOnce() { + ::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_, + &protobuf_AssignDesc_detection_5fresult_2eproto); +} + +void protobuf_RegisterTypes(const ::std::string&) GOOGLE_ATTRIBUTE_COLD; +void protobuf_RegisterTypes(const ::std::string&) { + 
protobuf_AssignDescriptorsOnce(); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + DetectionBox_descriptor_, DetectionBox::internal_default_instance()); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage( + DetectionResult_descriptor_, DetectionResult::internal_default_instance()); +} + +} // namespace + +void protobuf_ShutdownFile_detection_5fresult_2eproto() { + DetectionBox_default_instance_.Shutdown(); + delete DetectionBox_reflection_; + DetectionResult_default_instance_.Shutdown(); + delete DetectionResult_reflection_; +} + +void protobuf_InitDefaults_detection_5fresult_2eproto_impl() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + DetectionBox_default_instance_.DefaultConstruct(); + ::google::protobuf::internal::GetEmptyString(); + DetectionResult_default_instance_.DefaultConstruct(); + DetectionBox_default_instance_.get_mutable()->InitAsDefaultInstance(); + DetectionResult_default_instance_.get_mutable()->InitAsDefaultInstance(); +} + +GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_InitDefaults_detection_5fresult_2eproto_once_); +void protobuf_InitDefaults_detection_5fresult_2eproto() { + ::google::protobuf::GoogleOnceInit(&protobuf_InitDefaults_detection_5fresult_2eproto_once_, + &protobuf_InitDefaults_detection_5fresult_2eproto_impl); +} +void protobuf_AddDesc_detection_5fresult_2eproto_impl() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + protobuf_InitDefaults_detection_5fresult_2eproto(); + ::google::protobuf::DescriptorPool::InternalAddGeneratedFile( + "\n\026detection_result.proto\022\016PaddleSolution" + "\"\204\001\n\014DetectionBox\022\r\n\005class\030\001 \001(\005\022\r\n\005scor" + "e\030\002 \001(\002\022\022\n\nleft_top_x\030\003 \001(\002\022\022\n\nleft_top_" + "y\030\004 \001(\002\022\026\n\016right_bottom_x\030\005 \001(\002\022\026\n\016right" + "_bottom_y\030\006 \001(\002\"Z\n\017DetectionResult\022\020\n\010fi" + "lename\030\001 \001(\t\0225\n\017detection_boxes\030\002 \003(\0132\034." 
+ "PaddleSolution.DetectionBox", 267); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile( + "detection_result.proto", &protobuf_RegisterTypes); + ::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_detection_5fresult_2eproto); +} + +GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AddDesc_detection_5fresult_2eproto_once_); +void protobuf_AddDesc_detection_5fresult_2eproto() { + ::google::protobuf::GoogleOnceInit(&protobuf_AddDesc_detection_5fresult_2eproto_once_, + &protobuf_AddDesc_detection_5fresult_2eproto_impl); +} +// Force AddDescriptors() to be called at static initialization time. +struct StaticDescriptorInitializer_detection_5fresult_2eproto { + StaticDescriptorInitializer_detection_5fresult_2eproto() { + protobuf_AddDesc_detection_5fresult_2eproto(); + } +} static_descriptor_initializer_detection_5fresult_2eproto_; + +namespace { + +static void MergeFromFail(int line) GOOGLE_ATTRIBUTE_COLD GOOGLE_ATTRIBUTE_NORETURN; +static void MergeFromFail(int line) { + ::google::protobuf::internal::MergeFromFail(__FILE__, line); +} + +} // namespace + + +// =================================================================== + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int DetectionBox::kClassFieldNumber; +const int DetectionBox::kScoreFieldNumber; +const int DetectionBox::kLeftTopXFieldNumber; +const int DetectionBox::kLeftTopYFieldNumber; +const int DetectionBox::kRightBottomXFieldNumber; +const int DetectionBox::kRightBottomYFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +DetectionBox::DetectionBox() + : ::google::protobuf::Message(), _internal_metadata_(NULL) { + if (this != internal_default_instance()) protobuf_InitDefaults_detection_5fresult_2eproto(); + SharedCtor(); + // @@protoc_insertion_point(constructor:PaddleSolution.DetectionBox) +} + +void DetectionBox::InitAsDefaultInstance() { +} + +DetectionBox::DetectionBox(const DetectionBox& from) + : ::google::protobuf::Message(), + _internal_metadata_(NULL) { + 
SharedCtor(); + UnsafeMergeFrom(from); + // @@protoc_insertion_point(copy_constructor:PaddleSolution.DetectionBox) +} + +void DetectionBox::SharedCtor() { + _cached_size_ = 0; + ::memset(&class__, 0, reinterpret_cast(&right_bottom_y_) - + reinterpret_cast(&class__) + sizeof(right_bottom_y_)); +} + +DetectionBox::~DetectionBox() { + // @@protoc_insertion_point(destructor:PaddleSolution.DetectionBox) + SharedDtor(); +} + +void DetectionBox::SharedDtor() { +} + +void DetectionBox::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* DetectionBox::descriptor() { + protobuf_AssignDescriptorsOnce(); + return DetectionBox_descriptor_; +} + +const DetectionBox& DetectionBox::default_instance() { + protobuf_InitDefaults_detection_5fresult_2eproto(); + return *internal_default_instance(); +} + +::google::protobuf::internal::ExplicitlyConstructed DetectionBox_default_instance_; + +DetectionBox* DetectionBox::New(::google::protobuf::Arena* arena) const { + DetectionBox* n = new DetectionBox; + if (arena != NULL) { + arena->Own(n); + } + return n; +} + +void DetectionBox::Clear() { +// @@protoc_insertion_point(message_clear_start:PaddleSolution.DetectionBox) +#if defined(__clang__) +#define ZR_HELPER_(f) \ + _Pragma("clang diagnostic push") \ + _Pragma("clang diagnostic ignored \"-Winvalid-offsetof\"") \ + __builtin_offsetof(DetectionBox, f) \ + _Pragma("clang diagnostic pop") +#else +#define ZR_HELPER_(f) reinterpret_cast(\ + &reinterpret_cast(16)->f) +#endif + +#define ZR_(first, last) do {\ + ::memset(&(first), 0,\ + ZR_HELPER_(last) - ZR_HELPER_(first) + sizeof(last));\ +} while (0) + + ZR_(class__, right_bottom_y_); + +#undef ZR_HELPER_ +#undef ZR_ + + _has_bits_.Clear(); + if (_internal_metadata_.have_unknown_fields()) { + mutable_unknown_fields()->Clear(); + } +} + +bool DetectionBox::MergePartialFromCodedStream( + 
::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:PaddleSolution.DetectionBox) + for (;;) { + ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional int32 class = 1; + case 1: { + if (tag == 8) { + set_has_class_(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, &class__))); + } else { + goto handle_unusual; + } + if (input->ExpectTag(21)) goto parse_score; + break; + } + + // optional float score = 2; + case 2: { + if (tag == 21) { + parse_score: + set_has_score(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( + input, &score_))); + } else { + goto handle_unusual; + } + if (input->ExpectTag(29)) goto parse_left_top_x; + break; + } + + // optional float left_top_x = 3; + case 3: { + if (tag == 29) { + parse_left_top_x: + set_has_left_top_x(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( + input, &left_top_x_))); + } else { + goto handle_unusual; + } + if (input->ExpectTag(37)) goto parse_left_top_y; + break; + } + + // optional float left_top_y = 4; + case 4: { + if (tag == 37) { + parse_left_top_y: + set_has_left_top_y(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( + input, &left_top_y_))); + } else { + goto handle_unusual; + } + if (input->ExpectTag(45)) goto parse_right_bottom_x; + break; + } + + // optional float right_bottom_x = 5; + case 5: { + if 
(tag == 45) { + parse_right_bottom_x: + set_has_right_bottom_x(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( + input, &right_bottom_x_))); + } else { + goto handle_unusual; + } + if (input->ExpectTag(53)) goto parse_right_bottom_y; + break; + } + + // optional float right_bottom_y = 6; + case 6: { + if (tag == 53) { + parse_right_bottom_y: + set_has_right_bottom_y(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( + input, &right_bottom_y_))); + } else { + goto handle_unusual; + } + if (input->ExpectAtEnd()) goto success; + break; + } + + default: { + handle_unusual: + if (tag == 0 || + ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:PaddleSolution.DetectionBox) + return true; +failure: + // @@protoc_insertion_point(parse_failure:PaddleSolution.DetectionBox) + return false; +#undef DO_ +} + +void DetectionBox::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:PaddleSolution.DetectionBox) + // optional int32 class = 1; + if (has_class_()) { + ::google::protobuf::internal::WireFormatLite::WriteInt32(1, this->class_(), output); + } + + // optional float score = 2; + if (has_score()) { + ::google::protobuf::internal::WireFormatLite::WriteFloat(2, this->score(), output); + } + + // optional float left_top_x = 3; + if (has_left_top_x()) { + ::google::protobuf::internal::WireFormatLite::WriteFloat(3, this->left_top_x(), output); + } + + // optional float left_top_y = 4; + if (has_left_top_y()) { + 
::google::protobuf::internal::WireFormatLite::WriteFloat(4, this->left_top_y(), output); + } + + // optional float right_bottom_x = 5; + if (has_right_bottom_x()) { + ::google::protobuf::internal::WireFormatLite::WriteFloat(5, this->right_bottom_x(), output); + } + + // optional float right_bottom_y = 6; + if (has_right_bottom_y()) { + ::google::protobuf::internal::WireFormatLite::WriteFloat(6, this->right_bottom_y(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:PaddleSolution.DetectionBox) +} + +::google::protobuf::uint8* DetectionBox::InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const { + (void)deterministic; // Unused + // @@protoc_insertion_point(serialize_to_array_start:PaddleSolution.DetectionBox) + // optional int32 class = 1; + if (has_class_()) { + target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(1, this->class_(), target); + } + + // optional float score = 2; + if (has_score()) { + target = ::google::protobuf::internal::WireFormatLite::WriteFloatToArray(2, this->score(), target); + } + + // optional float left_top_x = 3; + if (has_left_top_x()) { + target = ::google::protobuf::internal::WireFormatLite::WriteFloatToArray(3, this->left_top_x(), target); + } + + // optional float left_top_y = 4; + if (has_left_top_y()) { + target = ::google::protobuf::internal::WireFormatLite::WriteFloatToArray(4, this->left_top_y(), target); + } + + // optional float right_bottom_x = 5; + if (has_right_bottom_x()) { + target = ::google::protobuf::internal::WireFormatLite::WriteFloatToArray(5, this->right_bottom_x(), target); + } + + // optional float right_bottom_y = 6; + if (has_right_bottom_y()) { + target = ::google::protobuf::internal::WireFormatLite::WriteFloatToArray(6, this->right_bottom_y(), target); + } + + if 
(_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:PaddleSolution.DetectionBox) + return target; +} + +size_t DetectionBox::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:PaddleSolution.DetectionBox) + size_t total_size = 0; + + if (_has_bits_[0 / 32] & 63u) { + // optional int32 class = 1; + if (has_class_()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size( + this->class_()); + } + + // optional float score = 2; + if (has_score()) { + total_size += 1 + 4; + } + + // optional float left_top_x = 3; + if (has_left_top_x()) { + total_size += 1 + 4; + } + + // optional float left_top_y = 4; + if (has_left_top_y()) { + total_size += 1 + 4; + } + + // optional float right_bottom_x = 5; + if (has_right_bottom_x()) { + total_size += 1 + 4; + } + + // optional float right_bottom_y = 6; + if (has_right_bottom_y()) { + total_size += 1 + 4; + } + + } + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = cached_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void DetectionBox::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:PaddleSolution.DetectionBox) + if (GOOGLE_PREDICT_FALSE(&from == this)) MergeFromFail(__LINE__); + const DetectionBox* source = + ::google::protobuf::internal::DynamicCastToGenerated( + &from); + if (source == NULL) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:PaddleSolution.DetectionBox) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // 
@@protoc_insertion_point(generalized_merge_from_cast_success:PaddleSolution.DetectionBox) + UnsafeMergeFrom(*source); + } +} + +void DetectionBox::MergeFrom(const DetectionBox& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:PaddleSolution.DetectionBox) + if (GOOGLE_PREDICT_TRUE(&from != this)) { + UnsafeMergeFrom(from); + } else { + MergeFromFail(__LINE__); + } +} + +void DetectionBox::UnsafeMergeFrom(const DetectionBox& from) { + GOOGLE_DCHECK(&from != this); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_class_()) { + set_class_(from.class_()); + } + if (from.has_score()) { + set_score(from.score()); + } + if (from.has_left_top_x()) { + set_left_top_x(from.left_top_x()); + } + if (from.has_left_top_y()) { + set_left_top_y(from.left_top_y()); + } + if (from.has_right_bottom_x()) { + set_right_bottom_x(from.right_bottom_x()); + } + if (from.has_right_bottom_y()) { + set_right_bottom_y(from.right_bottom_y()); + } + } + if (from._internal_metadata_.have_unknown_fields()) { + ::google::protobuf::UnknownFieldSet::MergeToInternalMetdata( + from.unknown_fields(), &_internal_metadata_); + } +} + +void DetectionBox::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:PaddleSolution.DetectionBox) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void DetectionBox::CopyFrom(const DetectionBox& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:PaddleSolution.DetectionBox) + if (&from == this) return; + Clear(); + UnsafeMergeFrom(from); +} + +bool DetectionBox::IsInitialized() const { + + return true; +} + +void DetectionBox::Swap(DetectionBox* other) { + if (other == this) return; + InternalSwap(other); +} +void DetectionBox::InternalSwap(DetectionBox* other) { + std::swap(class__, other->class__); + std::swap(score_, other->score_); + std::swap(left_top_x_, other->left_top_x_); + std::swap(left_top_y_, other->left_top_y_); + 
std::swap(right_bottom_x_, other->right_bottom_x_); + std::swap(right_bottom_y_, other->right_bottom_y_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _internal_metadata_.Swap(&other->_internal_metadata_); + std::swap(_cached_size_, other->_cached_size_); +} + +::google::protobuf::Metadata DetectionBox::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = DetectionBox_descriptor_; + metadata.reflection = DetectionBox_reflection_; + return metadata; +} + +#if PROTOBUF_INLINE_NOT_IN_HEADERS +// DetectionBox + +// optional int32 class = 1; +bool DetectionBox::has_class_() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +void DetectionBox::set_has_class_() { + _has_bits_[0] |= 0x00000001u; +} +void DetectionBox::clear_has_class_() { + _has_bits_[0] &= ~0x00000001u; +} +void DetectionBox::clear_class_() { + class__ = 0; + clear_has_class_(); +} +::google::protobuf::int32 DetectionBox::class_() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.class) + return class__; +} +void DetectionBox::set_class_(::google::protobuf::int32 value) { + set_has_class_(); + class__ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.class) +} + +// optional float score = 2; +bool DetectionBox::has_score() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +void DetectionBox::set_has_score() { + _has_bits_[0] |= 0x00000002u; +} +void DetectionBox::clear_has_score() { + _has_bits_[0] &= ~0x00000002u; +} +void DetectionBox::clear_score() { + score_ = 0; + clear_has_score(); +} +float DetectionBox::score() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.score) + return score_; +} +void DetectionBox::set_score(float value) { + set_has_score(); + score_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.score) +} + +// optional float left_top_x = 3; +bool DetectionBox::has_left_top_x() const { + return 
(_has_bits_[0] & 0x00000004u) != 0; +} +void DetectionBox::set_has_left_top_x() { + _has_bits_[0] |= 0x00000004u; +} +void DetectionBox::clear_has_left_top_x() { + _has_bits_[0] &= ~0x00000004u; +} +void DetectionBox::clear_left_top_x() { + left_top_x_ = 0; + clear_has_left_top_x(); +} +float DetectionBox::left_top_x() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.left_top_x) + return left_top_x_; +} +void DetectionBox::set_left_top_x(float value) { + set_has_left_top_x(); + left_top_x_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.left_top_x) +} + +// optional float left_top_y = 4; +bool DetectionBox::has_left_top_y() const { + return (_has_bits_[0] & 0x00000008u) != 0; +} +void DetectionBox::set_has_left_top_y() { + _has_bits_[0] |= 0x00000008u; +} +void DetectionBox::clear_has_left_top_y() { + _has_bits_[0] &= ~0x00000008u; +} +void DetectionBox::clear_left_top_y() { + left_top_y_ = 0; + clear_has_left_top_y(); +} +float DetectionBox::left_top_y() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.left_top_y) + return left_top_y_; +} +void DetectionBox::set_left_top_y(float value) { + set_has_left_top_y(); + left_top_y_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.left_top_y) +} + +// optional float right_bottom_x = 5; +bool DetectionBox::has_right_bottom_x() const { + return (_has_bits_[0] & 0x00000010u) != 0; +} +void DetectionBox::set_has_right_bottom_x() { + _has_bits_[0] |= 0x00000010u; +} +void DetectionBox::clear_has_right_bottom_x() { + _has_bits_[0] &= ~0x00000010u; +} +void DetectionBox::clear_right_bottom_x() { + right_bottom_x_ = 0; + clear_has_right_bottom_x(); +} +float DetectionBox::right_bottom_x() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.right_bottom_x) + return right_bottom_x_; +} +void DetectionBox::set_right_bottom_x(float value) { + set_has_right_bottom_x(); + right_bottom_x_ = value; + // 
@@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.right_bottom_x) +} + +// optional float right_bottom_y = 6; +bool DetectionBox::has_right_bottom_y() const { + return (_has_bits_[0] & 0x00000020u) != 0; +} +void DetectionBox::set_has_right_bottom_y() { + _has_bits_[0] |= 0x00000020u; +} +void DetectionBox::clear_has_right_bottom_y() { + _has_bits_[0] &= ~0x00000020u; +} +void DetectionBox::clear_right_bottom_y() { + right_bottom_y_ = 0; + clear_has_right_bottom_y(); +} +float DetectionBox::right_bottom_y() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.right_bottom_y) + return right_bottom_y_; +} +void DetectionBox::set_right_bottom_y(float value) { + set_has_right_bottom_y(); + right_bottom_y_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.right_bottom_y) +} + +inline const DetectionBox* DetectionBox::internal_default_instance() { + return &DetectionBox_default_instance_.get(); +} +#endif // PROTOBUF_INLINE_NOT_IN_HEADERS + +// =================================================================== + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int DetectionResult::kFilenameFieldNumber; +const int DetectionResult::kDetectionBoxesFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +DetectionResult::DetectionResult() + : ::google::protobuf::Message(), _internal_metadata_(NULL) { + if (this != internal_default_instance()) protobuf_InitDefaults_detection_5fresult_2eproto(); + SharedCtor(); + // @@protoc_insertion_point(constructor:PaddleSolution.DetectionResult) +} + +void DetectionResult::InitAsDefaultInstance() { +} + +DetectionResult::DetectionResult(const DetectionResult& from) + : ::google::protobuf::Message(), + _internal_metadata_(NULL) { + SharedCtor(); + UnsafeMergeFrom(from); + // @@protoc_insertion_point(copy_constructor:PaddleSolution.DetectionResult) +} + +void DetectionResult::SharedCtor() { + _cached_size_ = 0; + 
filename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} + +DetectionResult::~DetectionResult() { + // @@protoc_insertion_point(destructor:PaddleSolution.DetectionResult) + SharedDtor(); +} + +void DetectionResult::SharedDtor() { + filename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} + +void DetectionResult::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* DetectionResult::descriptor() { + protobuf_AssignDescriptorsOnce(); + return DetectionResult_descriptor_; +} + +const DetectionResult& DetectionResult::default_instance() { + protobuf_InitDefaults_detection_5fresult_2eproto(); + return *internal_default_instance(); +} + +::google::protobuf::internal::ExplicitlyConstructed DetectionResult_default_instance_; + +DetectionResult* DetectionResult::New(::google::protobuf::Arena* arena) const { + DetectionResult* n = new DetectionResult; + if (arena != NULL) { + arena->Own(n); + } + return n; +} + +void DetectionResult::Clear() { +// @@protoc_insertion_point(message_clear_start:PaddleSolution.DetectionResult) + if (has_filename()) { + filename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + detection_boxes_.Clear(); + _has_bits_.Clear(); + if (_internal_metadata_.have_unknown_fields()) { + mutable_unknown_fields()->Clear(); + } +} + +bool DetectionResult::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:PaddleSolution.DetectionResult) + for (;;) { + ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127); + tag = p.first; + if (!p.second) goto handle_unusual; + switch 
(::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string filename = 1; + case 1: { + if (tag == 10) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_filename())); + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->filename().data(), this->filename().length(), + ::google::protobuf::internal::WireFormat::PARSE, + "PaddleSolution.DetectionResult.filename"); + } else { + goto handle_unusual; + } + if (input->ExpectTag(18)) goto parse_detection_boxes; + break; + } + + // repeated .PaddleSolution.DetectionBox detection_boxes = 2; + case 2: { + if (tag == 18) { + parse_detection_boxes: + DO_(input->IncrementRecursionDepth()); + parse_loop_detection_boxes: + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtualNoRecursionDepth( + input, add_detection_boxes())); + } else { + goto handle_unusual; + } + if (input->ExpectTag(18)) goto parse_loop_detection_boxes; + input->UnsafeDecrementRecursionDepth(); + if (input->ExpectAtEnd()) goto success; + break; + } + + default: { + handle_unusual: + if (tag == 0 || + ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) == + ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:PaddleSolution.DetectionResult) + return true; +failure: + // @@protoc_insertion_point(parse_failure:PaddleSolution.DetectionResult) + return false; +#undef DO_ +} + +void DetectionResult::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:PaddleSolution.DetectionResult) + // optional string filename = 1; + if (has_filename()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->filename().data(), 
this->filename().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "PaddleSolution.DetectionResult.filename"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 1, this->filename(), output); + } + + // repeated .PaddleSolution.DetectionBox detection_boxes = 2; + for (unsigned int i = 0, n = this->detection_boxes_size(); i < n; i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 2, this->detection_boxes(i), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:PaddleSolution.DetectionResult) +} + +::google::protobuf::uint8* DetectionResult::InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const { + (void)deterministic; // Unused + // @@protoc_insertion_point(serialize_to_array_start:PaddleSolution.DetectionResult) + // optional string filename = 1; + if (has_filename()) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->filename().data(), this->filename().length(), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "PaddleSolution.DetectionResult.filename"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->filename(), target); + } + + // repeated .PaddleSolution.DetectionBox detection_boxes = 2; + for (unsigned int i = 0, n = this->detection_boxes_size(); i < n; i++) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageNoVirtualToArray( + 2, this->detection_boxes(i), false, target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:PaddleSolution.DetectionResult) + return target; +} + +size_t 
DetectionResult::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:PaddleSolution.DetectionResult) + size_t total_size = 0; + + // optional string filename = 1; + if (has_filename()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->filename()); + } + + // repeated .PaddleSolution.DetectionBox detection_boxes = 2; + { + unsigned int count = this->detection_boxes_size(); + total_size += 1UL * count; + for (unsigned int i = 0; i < count; i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->detection_boxes(i)); + } + } + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + unknown_fields()); + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = cached_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void DetectionResult::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:PaddleSolution.DetectionResult) + if (GOOGLE_PREDICT_FALSE(&from == this)) MergeFromFail(__LINE__); + const DetectionResult* source = + ::google::protobuf::internal::DynamicCastToGenerated( + &from); + if (source == NULL) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:PaddleSolution.DetectionResult) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:PaddleSolution.DetectionResult) + UnsafeMergeFrom(*source); + } +} + +void DetectionResult::MergeFrom(const DetectionResult& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:PaddleSolution.DetectionResult) + if (GOOGLE_PREDICT_TRUE(&from != this)) { + UnsafeMergeFrom(from); + } else { + MergeFromFail(__LINE__); + } +} + +void DetectionResult::UnsafeMergeFrom(const 
DetectionResult& from) { + GOOGLE_DCHECK(&from != this); + detection_boxes_.MergeFrom(from.detection_boxes_); + if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) { + if (from.has_filename()) { + set_has_filename(); + filename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.filename_); + } + } + if (from._internal_metadata_.have_unknown_fields()) { + ::google::protobuf::UnknownFieldSet::MergeToInternalMetdata( + from.unknown_fields(), &_internal_metadata_); + } +} + +void DetectionResult::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:PaddleSolution.DetectionResult) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void DetectionResult::CopyFrom(const DetectionResult& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:PaddleSolution.DetectionResult) + if (&from == this) return; + Clear(); + UnsafeMergeFrom(from); +} + +bool DetectionResult::IsInitialized() const { + + return true; +} + +void DetectionResult::Swap(DetectionResult* other) { + if (other == this) return; + InternalSwap(other); +} +void DetectionResult::InternalSwap(DetectionResult* other) { + filename_.Swap(&other->filename_); + detection_boxes_.UnsafeArenaSwap(&other->detection_boxes_); + std::swap(_has_bits_[0], other->_has_bits_[0]); + _internal_metadata_.Swap(&other->_internal_metadata_); + std::swap(_cached_size_, other->_cached_size_); +} + +::google::protobuf::Metadata DetectionResult::GetMetadata() const { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::Metadata metadata; + metadata.descriptor = DetectionResult_descriptor_; + metadata.reflection = DetectionResult_reflection_; + return metadata; +} + +#if PROTOBUF_INLINE_NOT_IN_HEADERS +// DetectionResult + +// optional string filename = 1; +bool DetectionResult::has_filename() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +void DetectionResult::set_has_filename() { + _has_bits_[0] |= 
0x00000001u; +} +void DetectionResult::clear_has_filename() { + _has_bits_[0] &= ~0x00000001u; +} +void DetectionResult::clear_filename() { + filename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_filename(); +} +const ::std::string& DetectionResult::filename() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionResult.filename) + return filename_.GetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +void DetectionResult::set_filename(const ::std::string& value) { + set_has_filename(); + filename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionResult.filename) +} +void DetectionResult::set_filename(const char* value) { + set_has_filename(); + filename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:PaddleSolution.DetectionResult.filename) +} +void DetectionResult::set_filename(const char* value, size_t size) { + set_has_filename(); + filename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:PaddleSolution.DetectionResult.filename) +} +::std::string* DetectionResult::mutable_filename() { + set_has_filename(); + // @@protoc_insertion_point(field_mutable:PaddleSolution.DetectionResult.filename) + return filename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +::std::string* DetectionResult::release_filename() { + // @@protoc_insertion_point(field_release:PaddleSolution.DetectionResult.filename) + clear_has_filename(); + return filename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +void DetectionResult::set_allocated_filename(::std::string* filename) { + if (filename != NULL) { + 
set_has_filename(); + } else { + clear_has_filename(); + } + filename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), filename); + // @@protoc_insertion_point(field_set_allocated:PaddleSolution.DetectionResult.filename) +} + +// repeated .PaddleSolution.DetectionBox detection_boxes = 2; +int DetectionResult::detection_boxes_size() const { + return detection_boxes_.size(); +} +void DetectionResult::clear_detection_boxes() { + detection_boxes_.Clear(); +} +const ::PaddleSolution::DetectionBox& DetectionResult::detection_boxes(int index) const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionResult.detection_boxes) + return detection_boxes_.Get(index); +} +::PaddleSolution::DetectionBox* DetectionResult::mutable_detection_boxes(int index) { + // @@protoc_insertion_point(field_mutable:PaddleSolution.DetectionResult.detection_boxes) + return detection_boxes_.Mutable(index); +} +::PaddleSolution::DetectionBox* DetectionResult::add_detection_boxes() { + // @@protoc_insertion_point(field_add:PaddleSolution.DetectionResult.detection_boxes) + return detection_boxes_.Add(); +} +::google::protobuf::RepeatedPtrField< ::PaddleSolution::DetectionBox >* +DetectionResult::mutable_detection_boxes() { + // @@protoc_insertion_point(field_mutable_list:PaddleSolution.DetectionResult.detection_boxes) + return &detection_boxes_; +} +const ::google::protobuf::RepeatedPtrField< ::PaddleSolution::DetectionBox >& +DetectionResult::detection_boxes() const { + // @@protoc_insertion_point(field_list:PaddleSolution.DetectionResult.detection_boxes) + return detection_boxes_; +} + +inline const DetectionResult* DetectionResult::internal_default_instance() { + return &DetectionResult_default_instance_.get(); +} +#endif // PROTOBUF_INLINE_NOT_IN_HEADERS + +// @@protoc_insertion_point(namespace_scope) + +} // namespace PaddleSolution + +// @@protoc_insertion_point(global_scope) diff --git 
a/PaddleCV/PaddleDetection/inference/utils/detection_result.pb.h b/PaddleCV/PaddleDetection/inference/utils/detection_result.pb.h new file mode 100644 index 0000000000000000000000000000000000000000..1b2f89ea9ca13f3f949bd19b097bb514a4afc525 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/utils/detection_result.pb.h @@ -0,0 +1,563 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: detection_result.proto + +#ifndef PROTOBUF_detection_5fresult_2eproto__INCLUDED +#define PROTOBUF_detection_5fresult_2eproto__INCLUDED + +#include + +#include + +#if GOOGLE_PROTOBUF_VERSION < 3001000 +#error This file was generated by a newer version of protoc which is +#error incompatible with your Protocol Buffer headers. Please update +#error your headers. +#endif +#if 3001000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION +#error This file was generated by an older version of protoc which is +#error incompatible with your Protocol Buffer headers. Please +#error regenerate this file with a newer version of protoc. +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +// @@protoc_insertion_point(includes) + +namespace PaddleSolution { + +// Internal implementation detail -- do not call these. 
+void protobuf_AddDesc_detection_5fresult_2eproto(); +void protobuf_InitDefaults_detection_5fresult_2eproto(); +void protobuf_AssignDesc_detection_5fresult_2eproto(); +void protobuf_ShutdownFile_detection_5fresult_2eproto(); + +class DetectionBox; +class DetectionResult; + +// =================================================================== + +class DetectionBox : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:PaddleSolution.DetectionBox) */ { + public: + DetectionBox(); + virtual ~DetectionBox(); + + DetectionBox(const DetectionBox& from); + + inline DetectionBox& operator=(const DetectionBox& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _internal_metadata_.unknown_fields(); + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return _internal_metadata_.mutable_unknown_fields(); + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const DetectionBox& default_instance(); + + static const DetectionBox* internal_default_instance(); + + void Swap(DetectionBox* other); + + // implements Message ---------------------------------------------- + + inline DetectionBox* New() const { return New(NULL); } + + DetectionBox* New(::google::protobuf::Arena* arena) const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const DetectionBox& from); + void MergeFrom(const DetectionBox& from); + void Clear(); + bool IsInitialized() const; + + size_t ByteSizeLong() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* output) const; + ::google::protobuf::uint8* 
SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const { + return InternalSerializeWithCachedSizesToArray(false, output); + } + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + void InternalSwap(DetectionBox* other); + void UnsafeMergeFrom(const DetectionBox& from); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return _internal_metadata_.arena(); + } + inline void* MaybeArenaPtr() const { + return _internal_metadata_.raw_arena_ptr(); + } + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional int32 class = 1; + bool has_class_() const; + void clear_class_(); + static const int kClassFieldNumber = 1; + ::google::protobuf::int32 class_() const; + void set_class_(::google::protobuf::int32 value); + + // optional float score = 2; + bool has_score() const; + void clear_score(); + static const int kScoreFieldNumber = 2; + float score() const; + void set_score(float value); + + // optional float left_top_x = 3; + bool has_left_top_x() const; + void clear_left_top_x(); + static const int kLeftTopXFieldNumber = 3; + float left_top_x() const; + void set_left_top_x(float value); + + // optional float left_top_y = 4; + bool has_left_top_y() const; + void clear_left_top_y(); + static const int kLeftTopYFieldNumber = 4; + float left_top_y() const; + void set_left_top_y(float value); + + // optional float right_bottom_x = 5; + bool has_right_bottom_x() const; + void clear_right_bottom_x(); + static const int kRightBottomXFieldNumber = 5; + float right_bottom_x() const; + void set_right_bottom_x(float value); + + // optional float right_bottom_y = 6; + bool has_right_bottom_y() const; + void clear_right_bottom_y(); + static const int kRightBottomYFieldNumber = 6; + float 
right_bottom_y() const; + void set_right_bottom_y(float value); + + // @@protoc_insertion_point(class_scope:PaddleSolution.DetectionBox) + private: + inline void set_has_class_(); + inline void clear_has_class_(); + inline void set_has_score(); + inline void clear_has_score(); + inline void set_has_left_top_x(); + inline void clear_has_left_top_x(); + inline void set_has_left_top_y(); + inline void clear_has_left_top_y(); + inline void set_has_right_bottom_x(); + inline void clear_has_right_bottom_x(); + inline void set_has_right_bottom_y(); + inline void clear_has_right_bottom_y(); + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::HasBits<1> _has_bits_; + mutable int _cached_size_; + ::google::protobuf::int32 class__; + float score_; + float left_top_x_; + float left_top_y_; + float right_bottom_x_; + float right_bottom_y_; + friend void protobuf_InitDefaults_detection_5fresult_2eproto_impl(); + friend void protobuf_AddDesc_detection_5fresult_2eproto_impl(); + friend void protobuf_AssignDesc_detection_5fresult_2eproto(); + friend void protobuf_ShutdownFile_detection_5fresult_2eproto(); + + void InitAsDefaultInstance(); +}; +extern ::google::protobuf::internal::ExplicitlyConstructed DetectionBox_default_instance_; + +// ------------------------------------------------------------------- + +class DetectionResult : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:PaddleSolution.DetectionResult) */ { + public: + DetectionResult(); + virtual ~DetectionResult(); + + DetectionResult(const DetectionResult& from); + + inline DetectionResult& operator=(const DetectionResult& from) { + CopyFrom(from); + return *this; + } + + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _internal_metadata_.unknown_fields(); + } + + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return _internal_metadata_.mutable_unknown_fields(); + } + 
+ static const ::google::protobuf::Descriptor* descriptor(); + static const DetectionResult& default_instance(); + + static const DetectionResult* internal_default_instance(); + + void Swap(DetectionResult* other); + + // implements Message ---------------------------------------------- + + inline DetectionResult* New() const { return New(NULL); } + + DetectionResult* New(::google::protobuf::Arena* arena) const; + void CopyFrom(const ::google::protobuf::Message& from); + void MergeFrom(const ::google::protobuf::Message& from); + void CopyFrom(const DetectionResult& from); + void MergeFrom(const DetectionResult& from); + void Clear(); + bool IsInitialized() const; + + size_t ByteSizeLong() const; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input); + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* output) const; + ::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const { + return InternalSerializeWithCachedSizesToArray(false, output); + } + int GetCachedSize() const { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const; + void InternalSwap(DetectionResult* other); + void UnsafeMergeFrom(const DetectionResult& from); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return _internal_metadata_.arena(); + } + inline void* MaybeArenaPtr() const { + return _internal_metadata_.raw_arena_ptr(); + } + public: + + ::google::protobuf::Metadata GetMetadata() const; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string filename = 1; + bool has_filename() const; + void clear_filename(); + static const int kFilenameFieldNumber = 1; + const 
::std::string& filename() const; + void set_filename(const ::std::string& value); + void set_filename(const char* value); + void set_filename(const char* value, size_t size); + ::std::string* mutable_filename(); + ::std::string* release_filename(); + void set_allocated_filename(::std::string* filename); + + // repeated .PaddleSolution.DetectionBox detection_boxes = 2; + int detection_boxes_size() const; + void clear_detection_boxes(); + static const int kDetectionBoxesFieldNumber = 2; + const ::PaddleSolution::DetectionBox& detection_boxes(int index) const; + ::PaddleSolution::DetectionBox* mutable_detection_boxes(int index); + ::PaddleSolution::DetectionBox* add_detection_boxes(); + ::google::protobuf::RepeatedPtrField< ::PaddleSolution::DetectionBox >* + mutable_detection_boxes(); + const ::google::protobuf::RepeatedPtrField< ::PaddleSolution::DetectionBox >& + detection_boxes() const; + + // @@protoc_insertion_point(class_scope:PaddleSolution.DetectionResult) + private: + inline void set_has_filename(); + inline void clear_has_filename(); + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::HasBits<1> _has_bits_; + mutable int _cached_size_; + ::google::protobuf::RepeatedPtrField< ::PaddleSolution::DetectionBox > detection_boxes_; + ::google::protobuf::internal::ArenaStringPtr filename_; + friend void protobuf_InitDefaults_detection_5fresult_2eproto_impl(); + friend void protobuf_AddDesc_detection_5fresult_2eproto_impl(); + friend void protobuf_AssignDesc_detection_5fresult_2eproto(); + friend void protobuf_ShutdownFile_detection_5fresult_2eproto(); + + void InitAsDefaultInstance(); +}; +extern ::google::protobuf::internal::ExplicitlyConstructed DetectionResult_default_instance_; + +// =================================================================== + + +// =================================================================== + +#if !PROTOBUF_INLINE_NOT_IN_HEADERS +// DetectionBox + +// optional int32 
class = 1; +inline bool DetectionBox::has_class_() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void DetectionBox::set_has_class_() { + _has_bits_[0] |= 0x00000001u; +} +inline void DetectionBox::clear_has_class_() { + _has_bits_[0] &= ~0x00000001u; +} +inline void DetectionBox::clear_class_() { + class__ = 0; + clear_has_class_(); +} +inline ::google::protobuf::int32 DetectionBox::class_() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.class) + return class__; +} +inline void DetectionBox::set_class_(::google::protobuf::int32 value) { + set_has_class_(); + class__ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.class) +} + +// optional float score = 2; +inline bool DetectionBox::has_score() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void DetectionBox::set_has_score() { + _has_bits_[0] |= 0x00000002u; +} +inline void DetectionBox::clear_has_score() { + _has_bits_[0] &= ~0x00000002u; +} +inline void DetectionBox::clear_score() { + score_ = 0; + clear_has_score(); +} +inline float DetectionBox::score() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.score) + return score_; +} +inline void DetectionBox::set_score(float value) { + set_has_score(); + score_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.score) +} + +// optional float left_top_x = 3; +inline bool DetectionBox::has_left_top_x() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void DetectionBox::set_has_left_top_x() { + _has_bits_[0] |= 0x00000004u; +} +inline void DetectionBox::clear_has_left_top_x() { + _has_bits_[0] &= ~0x00000004u; +} +inline void DetectionBox::clear_left_top_x() { + left_top_x_ = 0; + clear_has_left_top_x(); +} +inline float DetectionBox::left_top_x() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.left_top_x) + return left_top_x_; +} +inline void DetectionBox::set_left_top_x(float value) 
{ + set_has_left_top_x(); + left_top_x_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.left_top_x) +} + +// optional float left_top_y = 4; +inline bool DetectionBox::has_left_top_y() const { + return (_has_bits_[0] & 0x00000008u) != 0; +} +inline void DetectionBox::set_has_left_top_y() { + _has_bits_[0] |= 0x00000008u; +} +inline void DetectionBox::clear_has_left_top_y() { + _has_bits_[0] &= ~0x00000008u; +} +inline void DetectionBox::clear_left_top_y() { + left_top_y_ = 0; + clear_has_left_top_y(); +} +inline float DetectionBox::left_top_y() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.left_top_y) + return left_top_y_; +} +inline void DetectionBox::set_left_top_y(float value) { + set_has_left_top_y(); + left_top_y_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.left_top_y) +} + +// optional float right_bottom_x = 5; +inline bool DetectionBox::has_right_bottom_x() const { + return (_has_bits_[0] & 0x00000010u) != 0; +} +inline void DetectionBox::set_has_right_bottom_x() { + _has_bits_[0] |= 0x00000010u; +} +inline void DetectionBox::clear_has_right_bottom_x() { + _has_bits_[0] &= ~0x00000010u; +} +inline void DetectionBox::clear_right_bottom_x() { + right_bottom_x_ = 0; + clear_has_right_bottom_x(); +} +inline float DetectionBox::right_bottom_x() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.right_bottom_x) + return right_bottom_x_; +} +inline void DetectionBox::set_right_bottom_x(float value) { + set_has_right_bottom_x(); + right_bottom_x_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.right_bottom_x) +} + +// optional float right_bottom_y = 6; +inline bool DetectionBox::has_right_bottom_y() const { + return (_has_bits_[0] & 0x00000020u) != 0; +} +inline void DetectionBox::set_has_right_bottom_y() { + _has_bits_[0] |= 0x00000020u; +} +inline void DetectionBox::clear_has_right_bottom_y() { + _has_bits_[0] &= 
~0x00000020u; +} +inline void DetectionBox::clear_right_bottom_y() { + right_bottom_y_ = 0; + clear_has_right_bottom_y(); +} +inline float DetectionBox::right_bottom_y() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionBox.right_bottom_y) + return right_bottom_y_; +} +inline void DetectionBox::set_right_bottom_y(float value) { + set_has_right_bottom_y(); + right_bottom_y_ = value; + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionBox.right_bottom_y) +} + +inline const DetectionBox* DetectionBox::internal_default_instance() { + return &DetectionBox_default_instance_.get(); +} +// ------------------------------------------------------------------- + +// DetectionResult + +// optional string filename = 1; +inline bool DetectionResult::has_filename() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void DetectionResult::set_has_filename() { + _has_bits_[0] |= 0x00000001u; +} +inline void DetectionResult::clear_has_filename() { + _has_bits_[0] &= ~0x00000001u; +} +inline void DetectionResult::clear_filename() { + filename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_filename(); +} +inline const ::std::string& DetectionResult::filename() const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionResult.filename) + return filename_.GetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void DetectionResult::set_filename(const ::std::string& value) { + set_has_filename(); + filename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:PaddleSolution.DetectionResult.filename) +} +inline void DetectionResult::set_filename(const char* value) { + set_has_filename(); + filename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:PaddleSolution.DetectionResult.filename) +} 
+inline void DetectionResult::set_filename(const char* value, size_t size) { + set_has_filename(); + filename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:PaddleSolution.DetectionResult.filename) +} +inline ::std::string* DetectionResult::mutable_filename() { + set_has_filename(); + // @@protoc_insertion_point(field_mutable:PaddleSolution.DetectionResult.filename) + return filename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* DetectionResult::release_filename() { + // @@protoc_insertion_point(field_release:PaddleSolution.DetectionResult.filename) + clear_has_filename(); + return filename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void DetectionResult::set_allocated_filename(::std::string* filename) { + if (filename != NULL) { + set_has_filename(); + } else { + clear_has_filename(); + } + filename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), filename); + // @@protoc_insertion_point(field_set_allocated:PaddleSolution.DetectionResult.filename) +} + +// repeated .PaddleSolution.DetectionBox detection_boxes = 2; +inline int DetectionResult::detection_boxes_size() const { + return detection_boxes_.size(); +} +inline void DetectionResult::clear_detection_boxes() { + detection_boxes_.Clear(); +} +inline const ::PaddleSolution::DetectionBox& DetectionResult::detection_boxes(int index) const { + // @@protoc_insertion_point(field_get:PaddleSolution.DetectionResult.detection_boxes) + return detection_boxes_.Get(index); +} +inline ::PaddleSolution::DetectionBox* DetectionResult::mutable_detection_boxes(int index) { + // @@protoc_insertion_point(field_mutable:PaddleSolution.DetectionResult.detection_boxes) + return detection_boxes_.Mutable(index); +} +inline ::PaddleSolution::DetectionBox* 
DetectionResult::add_detection_boxes() { + // @@protoc_insertion_point(field_add:PaddleSolution.DetectionResult.detection_boxes) + return detection_boxes_.Add(); +} +inline ::google::protobuf::RepeatedPtrField< ::PaddleSolution::DetectionBox >* +DetectionResult::mutable_detection_boxes() { + // @@protoc_insertion_point(field_mutable_list:PaddleSolution.DetectionResult.detection_boxes) + return &detection_boxes_; +} +inline const ::google::protobuf::RepeatedPtrField< ::PaddleSolution::DetectionBox >& +DetectionResult::detection_boxes() const { + // @@protoc_insertion_point(field_list:PaddleSolution.DetectionResult.detection_boxes) + return detection_boxes_; +} + +inline const DetectionResult* DetectionResult::internal_default_instance() { + return &DetectionResult_default_instance_.get(); +} +#endif // !PROTOBUF_INLINE_NOT_IN_HEADERS +// ------------------------------------------------------------------- + + +// @@protoc_insertion_point(namespace_scope) + +} // namespace PaddleSolution + +// @@protoc_insertion_point(global_scope) + +#endif // PROTOBUF_detection_5fresult_2eproto__INCLUDED diff --git a/PaddleCV/PaddleDetection/inference/utils/detection_result.proto b/PaddleCV/PaddleDetection/inference/utils/detection_result.proto new file mode 100644 index 0000000000000000000000000000000000000000..2d1cbb2464ac09b0dcea01f8331da5ee7894a4d5 --- /dev/null +++ b/PaddleCV/PaddleDetection/inference/utils/detection_result.proto @@ -0,0 +1,21 @@ +syntax = "proto2"; +package PaddleSolution; + +message DetectionBox { + optional int32 class = 1; + optional float score = 2; + optional float left_top_x = 3; + optional float left_top_y = 4; + optional float right_bottom_x = 5; + optional float right_bottom_y = 6; +} + +message DetectionResult { + optional string filename = 1; + repeated DetectionBox detection_boxes = 2; +} + +//message DetectionResultsContainer { +// repeated DetectionResult result = 1; +//} + diff --git a/PaddleCV/PaddleDetection/inference/utils/utils.h 
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <algorithm>
#include <cmath>
#include <cstring>
#include <iostream>
#include <string>
#include <vector>

#ifdef _WIN32
#include <experimental/filesystem>
#else
#include <dirent.h>
#include <sys/types.h>
#endif

namespace PaddleSolution {
    namespace utils {
        // How an input image is resized before inference.
        enum SCALE_TYPE {
            UNPADDING,     // resize directly to the requested (new_w, new_h)
            RANGE_SCALING  // scale short side to target_size, capped by max_size
        };

        // Join a directory and a file name with the platform path separator.
        inline std::string path_join(const std::string& dir, const std::string& path) {
            std::string separator = "/";
            #ifdef _WIN32
            separator = "\\";
            #endif
            return dir + separator + path;
        }

        #ifndef _WIN32
        // Scan directory `path` (non-recursively) and return the sorted full
        // paths of entries whose extension (dot included) occurs in `exts`,
        // e.g. exts = ".jpeg|.jpg|.png". Returns an empty list when the
        // directory cannot be opened.
        inline std::vector<std::string> get_directory_images(
                const std::string& path, const std::string& exts) {
            std::vector<std::string> imgs;
            DIR* dir = opendir(path.c_str());
            if (dir == NULL) {
                // Fix: the original called closedir(NULL) here, which is
                // undefined behavior per POSIX.
                return imgs;
            }
            struct dirent* entry;
            while ((entry = readdir(dir)) != NULL) {
                const char* ext = strrchr(entry->d_name, '.');
                // Skip entries without an extension and the "." / ".." entries.
                if (!ext || std::string(ext) == "." || std::string(ext) == "..") {
                    continue;
                }
                // NOTE: substring match — assumes no extension in `exts` is a
                // prefix of another token's text; matches the original logic.
                if (exts.find(ext) != std::string::npos) {
                    imgs.push_back(path_join(path, entry->d_name));
                }
            }
            closedir(dir);  // Fix: the handle was leaked on the success path.
            std::sort(imgs.begin(), imgs.end());
            return imgs;
        }
        #else
        // Windows variant based on <experimental/filesystem>.
        inline std::vector<std::string> get_directory_images(
                const std::string& path, const std::string& exts) {
            std::vector<std::string> imgs;
            for (const auto& item :
                 std::experimental::filesystem::directory_iterator(path)) {
                auto suffix = item.path().extension().string();
                if (!suffix.empty() && exts.find(suffix) != std::string::npos) {
                    imgs.push_back(item.path().string());
                }
            }
            std::sort(imgs.begin(), imgs.end());
            return imgs;
        }
        #endif

        // Compute the resized shape for an image of size (w, h), in place.
        //   UNPADDING:     (w, h) <- (new_w, new_h); im_scale_ratio is set to 0
        //                  (unused for fixed-size resizing).
        //   RANGE_SCALING: scale so the short side becomes target_size; if
        //                  max_size > 0 the long side is capped at max_size.
        //                  im_scale_ratio receives the applied ratio.
        // Returns 0 on success, -1 when any dimension argument is non-positive.
        // Throws (int 0) on an unknown resize_type, matching the original.
        inline int scaling(int resize_type, int &w, int &h, int new_w, int new_h,
                           int target_size, int max_size, float &im_scale_ratio) {
            if (w <= 0 || h <= 0 || new_w <= 0 || new_h <= 0) {
                return -1;
            }
            switch (resize_type) {
            case SCALE_TYPE::UNPADDING:
                w = new_w;
                h = new_h;
                im_scale_ratio = 0;
                break;
            case SCALE_TYPE::RANGE_SCALING: {
                const int im_max_size = std::max(w, h);
                const int im_min_size = std::min(w, h);
                float scale_ratio = static_cast<float>(target_size) /
                                    static_cast<float>(im_min_size);
                if (max_size > 0 &&
                    std::round(scale_ratio * im_max_size) > max_size) {
                    scale_ratio = static_cast<float>(max_size) /
                                  static_cast<float>(im_max_size);
                }
                w = std::round(scale_ratio * static_cast<float>(w));
                h = std::round(scale_ratio * static_cast<float>(h));
                im_scale_ratio = scale_ratio;
                break;
            }
            default:
                std::cout << "Can't support this type of scaling strategy." << std::endl;
                std::cout << "Throw exception at file " << __FILE__ << " on line " << __LINE__ << std::endl;
                throw 0;
            }
            return 0;
        }
    }  // namespace utils
}  // namespace PaddleSolution