diff --git a/cmake/external/onnxruntime.cmake b/cmake/external/onnxruntime.cmake
index 2162f87812d130f19262955798f28e2c2adc4bac..89e913e18728f36fd395fefce3c8bd040940dca8 100644
--- a/cmake/external/onnxruntime.cmake
+++ b/cmake/external/onnxruntime.cmake
@@ -12,83 +12,125 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-if (NOT WITH_ONNXRUNTIME)
+if(NOT WITH_ONNXRUNTIME)
   return()
-endif ()
+endif()
 
-if (WITH_ARM)
+if(WITH_ARM)
   message(SEND_ERROR "The current onnxruntime backend doesn't support ARM cpu")
   return()
-endif ()
+endif()
 
-INCLUDE(ExternalProject)
+include(ExternalProject)
 
 add_definitions(-DPADDLE_WITH_ONNXRUNTIME)
 
-SET(ONNXRUNTIME_PROJECT "extern_onnxruntime")
-SET(ONNXRUNTIME_PREFIX_DIR ${THIRD_PARTY_PATH}/onnxruntime)
-SET(ONNXRUNTIME_SOURCE_DIR ${THIRD_PARTY_PATH}/onnxruntime/src/${ONNXRUNTIME_PROJECT})
-SET(ONNXRUNTIME_INSTALL_DIR ${THIRD_PARTY_PATH}/install/onnxruntime)
-SET(ONNXRUNTIME_INC_DIR "${ONNXRUNTIME_INSTALL_DIR}/include" CACHE PATH "onnxruntime include directory." FORCE)
-SET(ONNXRUNTIME_LIB_DIR "${ONNXRUNTIME_INSTALL_DIR}/lib" CACHE PATH "onnxruntime lib directory." FORCE)
-SET(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${ONNXRUNTIME_LIB_DIR}")
-
+set(ONNXRUNTIME_PROJECT "extern_onnxruntime")
+set(ONNXRUNTIME_VERSION "1.10.0")
+set(ONNXRUNTIME_PREFIX_DIR ${THIRD_PARTY_PATH}/onnxruntime)
+set(ONNXRUNTIME_SOURCE_DIR
+    ${THIRD_PARTY_PATH}/onnxruntime/src/${ONNXRUNTIME_PROJECT})
+set(ONNXRUNTIME_INSTALL_DIR ${THIRD_PARTY_PATH}/install/onnxruntime)
+set(ONNXRUNTIME_INC_DIR
+    "${ONNXRUNTIME_INSTALL_DIR}/include"
+    CACHE PATH "onnxruntime include directory." FORCE)
+set(ONNXRUNTIME_LIB_DIR
+    "${ONNXRUNTIME_INSTALL_DIR}/lib"
+    CACHE PATH "onnxruntime lib directory." FORCE)
+set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${ONNXRUNTIME_LIB_DIR}")
 
-if (WIN32)
-  SET(ONNXRUNTIME_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.10.0/onnxruntime-win-x64-1.10.0.zip")
-elseif (APPLE)
-  SET(ONNXRUNTIME_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.10.0/onnxruntime-osx-x86_64-1.10.0.tgz")
-else ()
-  SET(ONNXRUNTIME_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.10.0/onnxruntime-linux-x64-1.10.0.tgz")
+if(WIN32)
+  set(ONNXRUNTIME_URL
+      "https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-win-x64-${ONNXRUNTIME_VERSION}.zip"
+  )
+elseif(APPLE)
+  set(ONNXRUNTIME_URL
+      "https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-osx-x86_64-${ONNXRUNTIME_VERSION}.tgz"
+  )
+else()
+  set(ONNXRUNTIME_URL
+      "https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}.tgz"
+  )
 endif()
 
+# For ONNXRUNTIME code to include internal headers.
+include_directories(${ONNXRUNTIME_INC_DIR})
 
-INCLUDE_DIRECTORIES(${ONNXRUNTIME_INC_DIR}) # For ONNXRUNTIME code to include internal headers.
-if (WIN32)
-  SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.dll" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
-  SET(ONNXRUNTIME_SHARED_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.dll" CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
-  SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.lib" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
-elseif (APPLE)
-  SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/libonnxruntime.1.10.0.dylib" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
-  SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/libonnxruntime.1.10.0.dylib" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
-  SET(ONNXRUNTIME_SHARED_LIB ${ONNXRUNTIME_LIB} CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
-else ()
-  SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/libonnxruntime.so.1.10.0" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
-  SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/libonnxruntime.so.1.10.0" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
-  SET(ONNXRUNTIME_SHARED_LIB ${ONNXRUNTIME_LIB} CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
-endif ()
+set(ONNXRUNTIME_LIB_NEW_NAME "libonnxruntime${CMAKE_SHARED_LIBRARY_SUFFIX}")
+if(APPLE)
+  set(ONNXRUNTIME_LIB_NAME
+      "libonnxruntime.${ONNXRUNTIME_VERSION}${CMAKE_SHARED_LIBRARY_SUFFIX}")
+else()
+  set(ONNXRUNTIME_LIB_NAME
+      "libonnxruntime${CMAKE_SHARED_LIBRARY_SUFFIX}.${ONNXRUNTIME_VERSION}")
+endif()
+if(WIN32)
+  set(ONNXRUNTIME_SOURCE_LIB
+      "${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.dll"
+      CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
+  set(ONNXRUNTIME_SHARED_LIB
+      "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.dll"
+      CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
+  set(ONNXRUNTIME_LIB
+      "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.lib"
+      CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+elseif(APPLE)
+  set(ONNXRUNTIME_SOURCE_LIB
+      "${ONNXRUNTIME_SOURCE_DIR}/lib/${ONNXRUNTIME_LIB_NAME}"
+      CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
+  set(ONNXRUNTIME_LIB
+      "${ONNXRUNTIME_INSTALL_DIR}/lib/${ONNXRUNTIME_LIB_NAME}"
+      CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+  set(ONNXRUNTIME_SHARED_LIB
+      ${ONNXRUNTIME_LIB}
+      CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
+else()
+  set(ONNXRUNTIME_SOURCE_LIB
+      "${ONNXRUNTIME_SOURCE_DIR}/lib/${ONNXRUNTIME_LIB_NAME}"
+      CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
+  set(ONNXRUNTIME_LIB
+      "${ONNXRUNTIME_INSTALL_DIR}/lib/${ONNXRUNTIME_LIB_NAME}"
+      CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+  set(ONNXRUNTIME_SHARED_LIB
+      ${ONNXRUNTIME_LIB}
+      CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
+endif()
 
-if (WIN32)
+if(WIN32)
   ExternalProject_Add(
-  ${ONNXRUNTIME_PROJECT}
-  ${EXTERNAL_PROJECT_LOG_ARGS}
-  URL ${ONNXRUNTIME_URL}
-  PREFIX ${ONNXRUNTIME_PREFIX_DIR}
-  DOWNLOAD_NO_PROGRESS 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB} ${ONNXRUNTIME_SHARED_LIB} &&
-                  ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.lib ${ONNXRUNTIME_LIB} &&
-                  ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include ${ONNXRUNTIME_INC_DIR}
-  BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB}
-  )
-else ()
+    ${ONNXRUNTIME_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${ONNXRUNTIME_URL}
+    PREFIX ${ONNXRUNTIME_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB}
+      ${ONNXRUNTIME_SHARED_LIB} && ${CMAKE_COMMAND} -E copy
+      ${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.lib ${ONNXRUNTIME_LIB} &&
+      ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include
+      ${ONNXRUNTIME_INC_DIR}
+    BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB})
+else()
   ExternalProject_Add(
     ${ONNXRUNTIME_PROJECT}
    ${EXTERNAL_PROJECT_LOG_ARGS}
-    URL ${ONNXRUNTIME_URL}
-    PREFIX ${ONNXRUNTIME_PREFIX_DIR}
-    DOWNLOAD_NO_PROGRESS 1
-    CONFIGURE_COMMAND ""
-    BUILD_COMMAND ""
-    UPDATE_COMMAND ""
-    INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB} ${ONNXRUNTIME_LIB} &&
-                    ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include ${ONNXRUNTIME_INC_DIR}
-    BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB}
-  )
+    URL ${ONNXRUNTIME_URL}
+    PREFIX ${ONNXRUNTIME_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB} ${ONNXRUNTIME_LIB} &&
+      ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include
+      ${ONNXRUNTIME_INC_DIR} && ${CMAKE_COMMAND} -E create_symlink
+      ${ONNXRUNTIME_LIB_NAME} ${ONNXRUNTIME_LIB_DIR}/${ONNXRUNTIME_LIB_NEW_NAME}
+    BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB})
 endif()
 
-ADD_LIBRARY(onnxruntime STATIC IMPORTED GLOBAL)
-SET_PROPERTY(TARGET onnxruntime PROPERTY IMPORTED_LOCATION ${ONNXRUNTIME_LIB})
-ADD_DEPENDENCIES(onnxruntime ${ONNXRUNTIME_PROJECT})
+add_library(onnxruntime STATIC IMPORTED GLOBAL)
+set_property(TARGET onnxruntime PROPERTY IMPORTED_LOCATION ${ONNXRUNTIME_LIB})
+add_dependencies(onnxruntime ${ONNXRUNTIME_PROJECT})
diff --git a/cmake/external/paddle2onnx.cmake b/cmake/external/paddle2onnx.cmake
index 4f590fb3fd709b9df16676067be3392d9ecf4bd7..5076e1b2cffd88d37d8b7bf31a951dc211614e65 100644
--- a/cmake/external/paddle2onnx.cmake
+++ b/cmake/external/paddle2onnx.cmake
@@ -16,63 +16,104 @@ if(NOT WITH_ONNXRUNTIME)
   return()
 endif()
 
-if (WITH_ARM)
+if(WITH_ARM)
   message(SEND_ERROR "The current onnxruntime backend doesn't support ARM cpu")
   return()
-endif ()
+endif()
 
-INCLUDE(ExternalProject)
+include(ExternalProject)
 
-SET(PADDLE2ONNX_PROJECT "extern_paddle2onnx")
-SET(PADDLE2ONNX_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle2onnx)
-SET(PADDLE2ONNX_INSTALL_DIR ${THIRD_PARTY_PATH}/install/paddle2onnx)
-SET(PADDLE2ONNX_SOURCE_DIR ${THIRD_PARTY_PATH}/paddle2onnx/src/${PADDLE2ONNX_PROJECT})
-SET(PADDLE2ONNX_INC_DIR "${PADDLE2ONNX_INSTALL_DIR}/include" CACHE PATH "paddle2onnx include directory." FORCE)
-SET(PADDLE2ONNX_LIB_DIR
+set(PADDLE2ONNX_PROJECT "extern_paddle2onnx")
+set(PADDLE2ONNX_VERSION "0.9.8")
+set(PADDLE2ONNX_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle2onnx)
+set(PADDLE2ONNX_SOURCE_DIR
+    ${THIRD_PARTY_PATH}/paddle2onnx/src/${PADDLE2ONNX_PROJECT})
+set(PADDLE2ONNX_INSTALL_DIR ${THIRD_PARTY_PATH}/install/paddle2onnx)
+set(PADDLE2ONNX_INC_DIR
+    "${PADDLE2ONNX_INSTALL_DIR}/include"
+    CACHE PATH "paddle2onnx include directory." FORCE)
+set(PADDLE2ONNX_LIB_DIR
     "${PADDLE2ONNX_INSTALL_DIR}/lib"
     CACHE PATH "onnxruntime lib directory." FORCE)
-SET(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}")
+set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${PADDLE2ONNX_LIB_DIR}")
+
+# For PADDLE2ONNX code to include internal headers.
+include_directories(${PADDLE2ONNX_INC_DIR})
+set(PADDLE2ONNX_LIB_NEW_NAME "libpaddle2onnx${CMAKE_SHARED_LIBRARY_SUFFIX}")
+if(APPLE)
+  set(PADDLE2ONNX_LIB_NAME
+      "libpaddle2onnx.${PADDLE2ONNX_VERSION}${CMAKE_SHARED_LIBRARY_SUFFIX}")
+else()
+  set(PADDLE2ONNX_LIB_NAME
+      "libpaddle2onnx${CMAKE_SHARED_LIBRARY_SUFFIX}.${PADDLE2ONNX_VERSION}")
+endif()
 
-INCLUDE_DIRECTORIES(${PADDLE2ONNX_INC_DIR}) # For PADDLE2ONNX code to include internal headers.
 if(WIN32)
-  SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.dll" CACHE FILEPATH "paddle2onnx library." FORCE)
-  SET(PADDLE2ONNX_COMPILE_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.lib" CACHE FILEPATH "paddle2onnx compile library." FORCE)
-elseif(APPLE)
-  SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.dylib" CACHE FILEPATH "PADDLE2ONNX library." FORCE)
-  SET(PADDLE2ONNX_COMPILE_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.dylib" CACHE FILEPATH "paddle2onnx compile library." FORCE)
+  set(PADDLE2ONNX_LIB
+      "${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.dll"
+      CACHE FILEPATH "paddle2onnx library." FORCE)
+  set(PADDLE2ONNX_COMPILE_LIB
+      "${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.lib"
+      CACHE FILEPATH "paddle2onnx compile library." FORCE)
 else()
-  SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.so" CACHE FILEPATH "PADDLE2ONNX library." FORCE)
-  SET(PADDLE2ONNX_COMPILE_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.so" CACHE FILEPATH "PADDLE2ONNX library." FORCE)
-endif(WIN32)
+  set(PADDLE2ONNX_SOURCE_LIB
+      "${PADDLE2ONNX_SOURCE_DIR}/lib/${PADDLE2ONNX_LIB_NAME}"
+      CACHE FILEPATH "PADDLE2ONNX source library." FORCE)
+  set(PADDLE2ONNX_LIB
+      "${PADDLE2ONNX_LIB_DIR}/${PADDLE2ONNX_LIB_NAME}"
+      CACHE FILEPATH "PADDLE2ONNX library." FORCE)
+  set(PADDLE2ONNX_COMPILE_LIB
+      ${PADDLE2ONNX_LIB}
+      CACHE FILEPATH "paddle2onnx compile library." FORCE)
+endif()
 
 if(WIN32)
   set(PADDLE2ONNX_URL
-      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v0.9.7/paddle2onnx-win-x64-0.9.7.zip"
+      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v${PADDLE2ONNX_VERSION}/paddle2onnx-win-x64-${PADDLE2ONNX_VERSION}.zip"
   )
 elseif(APPLE)
   set(PADDLE2ONNX_URL
-      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v0.9.7/paddle2onnx-osx-x86_64-0.9.7.tgz"
+      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v${PADDLE2ONNX_VERSION}/paddle2onnx-osx-x86_64-${PADDLE2ONNX_VERSION}.tgz"
   )
 else()
   set(PADDLE2ONNX_URL
-      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v0.9.7/paddle2onnx-linux-x64-0.9.7.tgz"
+      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v${PADDLE2ONNX_VERSION}/paddle2onnx-linux-x64-${PADDLE2ONNX_VERSION}.tgz"
   )
 endif()
 
-ExternalProject_Add(
-  ${PADDLE2ONNX_PROJECT}
-  ${EXTERNAL_PROJECT_LOG_ARGS}
-  URL ${PADDLE2ONNX_URL}
-  PREFIX ${PADDLE2ONNX_PREFIX_DIR}
-  DOWNLOAD_NO_PROGRESS 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND
-    ${CMAKE_COMMAND} -E copy_directory ${PADDLE2ONNX_SOURCE_DIR}/lib
-    ${PADDLE2ONNX_LIB_DIR} && ${CMAKE_COMMAND} -E copy_directory
-    ${PADDLE2ONNX_SOURCE_DIR}/include ${PADDLE2ONNX_INC_DIR}
-  BUILD_BYPRODUCTS ${PADDLE2ONNX_COMPILE_LIB})
+if(WIN32)
+  ExternalProject_Add(
+    ${PADDLE2ONNX_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${PADDLE2ONNX_URL}
+    PREFIX ${PADDLE2ONNX_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E copy_directory ${PADDLE2ONNX_SOURCE_DIR}/lib
+      ${PADDLE2ONNX_LIB_DIR} && ${CMAKE_COMMAND} -E copy_directory
+      ${PADDLE2ONNX_SOURCE_DIR}/include ${PADDLE2ONNX_INC_DIR}
+    BUILD_BYPRODUCTS ${PADDLE2ONNX_COMPILE_LIB})
+else()
+  ExternalProject_Add(
+    ${PADDLE2ONNX_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${PADDLE2ONNX_URL}
+    PREFIX ${PADDLE2ONNX_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E copy ${PADDLE2ONNX_SOURCE_LIB}
+      ${PADDLE2ONNX_COMPILE_LIB} && ${CMAKE_COMMAND} -E copy_directory
+      ${PADDLE2ONNX_SOURCE_DIR}/include ${PADDLE2ONNX_INC_DIR} &&
+      ${CMAKE_COMMAND} -E create_symlink ${PADDLE2ONNX_LIB_NAME}
+      ${PADDLE2ONNX_LIB_DIR}/${PADDLE2ONNX_LIB_NEW_NAME}
+    BUILD_BYPRODUCTS ${PADDLE2ONNX_COMPILE_LIB})
+endif()
 
 add_library(paddle2onnx STATIC IMPORTED GLOBAL)
 set_property(TARGET paddle2onnx PROPERTY IMPORTED_LOCATION
diff --git a/paddle/fluid/inference/api/demo_ci/CMakeLists.txt b/paddle/fluid/inference/api/demo_ci/CMakeLists.txt
index 547e265d2fdb51fbdc22c703206ba55d84e4316f..a00e2fa1b3332f5526fbd8068132912d4f2d25ad 100644
--- a/paddle/fluid/inference/api/demo_ci/CMakeLists.txt
+++ b/paddle/fluid/inference/api/demo_ci/CMakeLists.txt
@@ -162,13 +162,7 @@ else()
 endif()
 
 if (WITH_ONNXRUNTIME)
-  if(WIN32)
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/onnxruntime.lib paddle2onnx)
-  elseif(APPLE)
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/libonnxruntime.1.10.0.dylib paddle2onnx)
-  else()
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/libonnxruntime.so.1.10.0 paddle2onnx)
-  endif()
+  set(DEPS ${DEPS} onnxruntime paddle2onnx)
 endif()
 
diff --git a/paddle/fluid/inference/api/demo_ci/onnxruntime_mobilenet_demo.cc b/paddle/fluid/inference/api/demo_ci/onnxruntime_mobilenet_demo.cc
index ef5c08cd041eb7af4c7f17a95c4fd9b8601e4bad..9e84560aa3624b0ad2d5a73da0d6ed9b8f0b452a 100644
--- a/paddle/fluid/inference/api/demo_ci/onnxruntime_mobilenet_demo.cc
+++ b/paddle/fluid/inference/api/demo_ci/onnxruntime_mobilenet_demo.cc
@@ -13,15 +13,19 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 
 /*
- * This file contains demo of mobilenet for tensorrt.
+ * This file contains demo of mobilenet for onnxruntime backend.
  */
-
 #include <glog/logging.h>  // use glog instead of CHECK to avoid importing other paddle header files.
+
+#include <algorithm>
+#include <numeric>
 #include <vector>
+
 #include "gflags/gflags.h"
 #include "utils.h"  // NOLINT
 
 DEFINE_string(modeldir, "", "Directory of the inference model.");
+DEFINE_string(data, "", "path of data");
 
 namespace paddle {
 namespace demo {
@@ -37,8 +41,21 @@ void Main() {
   auto predictor = paddle_infer::CreatePredictor(config);
 
   // Inference.
+  LOG(INFO) << "--- prepare input data ----";
   std::vector<int> input_shape = {1, 3, 224, 224};
-  std::vector<float> input_data(1 * 3 * 224 * 224, 1.0);
+  std::vector<float> input_data;
+  std::string line;
+  std::ifstream file(FLAGS_data);
+  std::getline(file, line);
+  file.close();
+  std::vector<std::string> data_strs;
+  split(line, ' ', &data_strs);
+  int input_num = 0;
+  for (auto& d : data_strs) {
+    input_num += 1;
+    input_data.push_back(std::stof(d));
+  }
+
   std::vector<float> out_data;
   out_data.resize(1000);
   auto input_names = predictor->GetInputNames();
@@ -51,7 +68,19 @@ void Main() {
 
   predictor->Run();
   output_tensor->CopyToCpu(out_data.data());
-  VLOG(3) << "output.size " << out_data.size();
+  std::vector<int> out_index(out_data.size());
+  std::iota(out_index.begin(), out_index.end(), 0);
+  std::sort(out_index.begin(), out_index.end(),
+            [&out_data](int index1, int index2) {
+              return out_data[index1] > out_data[index2];
+            });
+  LOG(INFO) << "output.size " << out_data.size()
+            << " max_index:" << out_index[0];
+  CHECK_EQ(out_data.size(), 1000);
+  int max_index = out_index[0];
+  CHECK_EQ(max_index, 13);
+  float max_score = out_data[max_index];
+  CHECK_LE(fabs(max_score - 0.99981), 1e-4);
 }
 
 }  // namespace demo
diff --git a/paddle/fluid/inference/api/demo_ci/run.sh b/paddle/fluid/inference/api/demo_ci/run.sh
index c8a78a168a81c7ba653d1fc4a36c7ab6cea34c85..f11319d766548c4d63b9f9b83686944ad0b700e6 100755
--- a/paddle/fluid/inference/api/demo_ci/run.sh
+++ b/paddle/fluid/inference/api/demo_ci/run.sh
@@ -52,15 +52,17 @@ if [ $7 == ON ]; then
     mkdir -p MobileNetV2
     cd MobileNetV2
     if [[ -e "MobileNetV2.inference.model.tar.gz" ]]; then
-        echo "MobileNetV2.inference.model.tar.gz has been downloaded."
-    else
+        rm -rf MobileNetV2.inference.model.tar.gz
+    fi
+    # echo "MobileNetV2.inference.model.tar.gz has been downloaded."
+    # else
         if [ $WIN_DETECT != "" ]; then
             wget -q -Y off http://paddle-inference-dist.bj.bcebos.com/MobileNetV2.inference.model.tar.gz
         else
             wget -q --no-proxy http://paddle-inference-dist.bj.bcebos.com/MobileNetV2.inference.model.tar.gz
         fi
         tar xzf *.tar.gz
-    fi
+    # fi
     cd ..
 fi
 
@@ -265,7 +267,8 @@ for WITH_STATIC_LIB in ON OFF; do
       -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME
     make -j$(nproc)
     ./onnxruntime_mobilenet_demo \
-      --modeldir=$DATA_DIR/MobileNetV2/MobileNetV2
+      --modeldir=$DATA_DIR/MobileNetV2/MobileNetV2 \
+      --data=$DATA_DIR/MobileNetV2/MobileNetV2/data.txt
     if [ $? -ne 0 ]; then
       echo "onnxruntime_mobilenet_demo runs failed " >> ${current_dir}/test_summary.txt
       EXIT_CODE=1
diff --git a/paddle/fluid/inference/api/details/zero_copy_tensor.cc b/paddle/fluid/inference/api/details/zero_copy_tensor.cc
index e6cea1b46f9e299f731a3c633c81146158cb72c1..910256834b1581f9164c749378c781c3837a0a2c 100644
--- a/paddle/fluid/inference/api/details/zero_copy_tensor.cc
+++ b/paddle/fluid/inference/api/details/zero_copy_tensor.cc
@@ -626,6 +626,10 @@ void Tensor::SetOrtBinding(const std::shared_ptr<Ort::IoBinding> binding) {
   binding_ = binding;
 }
 
+void Tensor::SetOrtBuffer(const std::shared_ptr<std::vector<int8_t>> buffer) {
+  buffer_ = buffer;
+}
+
 Ort::Value GetOrtVaule(const Ort::MemoryInfo &memory_info, float *data,
                        size_t size, const int64_t *shape, size_t shape_len) {
   return Ort::Value::CreateTensor<float>(memory_info, data, size, shape,
@@ -674,11 +678,12 @@ void Tensor::ORTCopyFromCpu(const T *data) {
                                           OrtMemTypeDefault);
   size_t size = std::accumulate(begin(shape_), end(shape_), 1UL,
                                 std::multiplies<int64_t>());
+  auto buffer = buffer_.lock();
   size_t buffer_size = size * sizeof(T);
-  if (buffer_size > buffer_.size()) {
-    buffer_.resize(buffer_size);
+  if (buffer_size > buffer->size()) {
+    buffer->resize(buffer_size);
   }
-  std::memcpy(static_cast<void *>(buffer_.data()), data, buffer_size);
+  std::memcpy(static_cast<void *>(buffer->data()), data, buffer_size);
 
   auto onnx_dtype = ONNX_TENSOR_ELEMENT_DATA_TYPE_UNDEFINED;
   if (std::is_same<T, float>::value) {
@@ -695,16 +700,14 @@ void Tensor::ORTCopyFromCpu(const T *data) {
     onnx_dtype = ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8;
   } else if (std::is_same<T, float16>::value) {
     onnx_dtype = ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT16;
-  }
-
-  if (onnx_dtype == ONNX_TENSOR_ELEMENT_DATA_TYPE_UNDEFINED) {
+  } else {
     PADDLE_THROW(paddle::platform::errors::InvalidArgument(
         "Found undefined data type for onnxruntime, only supports "
         "float16/float32/float64/int8/uint8/int32/int64."));
   }
 
   auto ort_value =
-      Ort::Value::CreateTensor(memory_info, buffer_.data(), buffer_size,
+      Ort::Value::CreateTensor(memory_info, buffer->data(), buffer_size,
                                shape_.data(), shape_.size(), onnx_dtype);
 
   binding->BindInput(name_.c_str(), ort_value);
diff --git a/paddle/fluid/inference/api/onnxruntime_predictor.cc b/paddle/fluid/inference/api/onnxruntime_predictor.cc
index 93a96863053e554f26d7f6696fc422a49c420a02..98fd3267acee83518603e2ab23b34b0de71a0fd3 100644
--- a/paddle/fluid/inference/api/onnxruntime_predictor.cc
+++ b/paddle/fluid/inference/api/onnxruntime_predictor.cc
@@ -254,6 +254,14 @@ std::unique_ptr<ZeroCopyTensor> ONNXRuntimePredictor::GetInputTensor(
   }
   res->SetOrtMark(true);
   res->SetOrtBinding(binding_);
+  auto iter = input_buffers_.find(name);
+  if (iter == input_buffers_.end()) {
+    std::vector<int8_t> i_vector;
+    input_buffers_[name] = std::make_shared<std::vector<int8_t>>(i_vector);
+    res->SetOrtBuffer(input_buffers_[name]);
+  } else {
+    res->SetOrtBuffer(iter->second);
+  }
   return res;
 }
 
diff --git a/paddle/fluid/inference/api/onnxruntime_predictor.h b/paddle/fluid/inference/api/onnxruntime_predictor.h
index d01756e4b96b132e3f9c3815e96f612433616ff2..e7f7732d974193f57f9311815192edf46e27b282 100644
--- a/paddle/fluid/inference/api/onnxruntime_predictor.h
+++ b/paddle/fluid/inference/api/onnxruntime_predictor.h
@@ -199,6 +199,7 @@
   platform::Place place_;
   std::vector<ONNXDesc> input_desc_;
   std::vector<ONNXDesc> output_desc_;
+  std::map<std::string, std::shared_ptr<std::vector<int8_t>>> input_buffers_;
   int predictor_id_;
 
   // Some more detailed tests, they are made the friends of the predictor, so that
diff --git a/paddle/fluid/inference/api/paddle_tensor.h b/paddle/fluid/inference/api/paddle_tensor.h
index 2ae5ac5e6d336a5fcd884d7cf0f8cb755fd7ae96..ce634ef08ca08501694168afb156caf7e9d8ecac 100644
--- a/paddle/fluid/inference/api/paddle_tensor.h
+++ b/paddle/fluid/inference/api/paddle_tensor.h
@@ -183,7 +183,7 @@
 #ifdef PADDLE_WITH_ONNXRUNTIME
   bool is_ort_tensor_{false};
   std::vector<int64_t> shape_;
-  std::vector<int8_t> buffer_;
+  std::weak_ptr<std::vector<int8_t>> buffer_;
   std::weak_ptr<Ort::IoBinding> binding_;
   int idx_{-1};
 
@@ -191,6 +191,8 @@
 
   void SetOrtBinding(const std::shared_ptr<Ort::IoBinding> binding);
 
+  void SetOrtBuffer(const std::shared_ptr<std::vector<int8_t>> buffer);
+
   template <typename T>
   void ORTCopyFromCpu(const T* data);
 
diff --git a/paddle/fluid/inference/tests/infer_ut/CMakeLists.txt b/paddle/fluid/inference/tests/infer_ut/CMakeLists.txt
index ad7ef0c04ce67d705e1b4639f4bd92f505caec26..c10d44c7067f4c02a564f5232b7d3466bdfcc230 100644
--- a/paddle/fluid/inference/tests/infer_ut/CMakeLists.txt
+++ b/paddle/fluid/inference/tests/infer_ut/CMakeLists.txt
@@ -183,13 +183,7 @@ else()
 endif()
 
 if (WITH_ONNXRUNTIME)
-  if(WIN32)
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/onnxruntime.lib paddle2onnx)
-  elseif(APPLE)
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/libonnxruntime.1.10.0.dylib paddle2onnx)
-  else()
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/libonnxruntime.so.1.10.0 paddle2onnx)
-  endif()
+  set(DEPS ${DEPS} onnxruntime paddle2onnx)
 endif()
 
 if (NOT WIN32)
diff --git a/paddle/fluid/pybind/CMakeLists.txt b/paddle/fluid/pybind/CMakeLists.txt
index 71a23409dad64928c97decc65d5b5161350fd8a8..916af4592d1857cd07c01d1543b9cc3f669d0d86 100644
--- a/paddle/fluid/pybind/CMakeLists.txt
+++ b/paddle/fluid/pybind/CMakeLists.txt
@@ -286,13 +286,10 @@ if(WITH_PYTHON)
     # LD_LIBRARY_PATH. This is different with Windows platformm, which search
     # *.dll in current directory automatically.
     if(WITH_ONNXRUNTIME)
-      if (APPLE)
-        set(PADDLE2ONNX_PYBIND_OUT ${CMAKE_CURRENT_BINARY_DIR}/libpaddle2onnx.dylib)
-        set(ONNXRUNTIME_PYBIND_OUT ${CMAKE_CURRENT_BINARY_DIR}/libonnxruntime.dylib)
-      else()
-        set(PADDLE2ONNX_PYBIND_OUT ${CMAKE_CURRENT_BINARY_DIR}/libpaddle2onnx.so)
-        set(ONNXRUNTIME_PYBIND_OUT ${CMAKE_CURRENT_BINARY_DIR}/libonnxruntime.so)
-      endif()
+      set(PADDLE2ONNX_PYBIND_OUT
+          ${CMAKE_CURRENT_BINARY_DIR}/${PADDLE2ONNX_LIB_NAME})
+      set(ONNXRUNTIME_PYBIND_OUT
+          ${CMAKE_CURRENT_BINARY_DIR}/${ONNXRUNTIME_LIB_NAME})
 
       ADD_CUSTOM_COMMAND(OUTPUT ${PADDLE2ONNX_PYBIND_OUT}
         COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE2ONNX_LIB} ${CMAKE_CURRENT_BINARY_DIR}
diff --git a/python/setup.py.in b/python/setup.py.in
index 6b899bb447a55adaff4766e7e7f550b678efe755..ee9e9b7e8ec401099817226bd27e3971802b0990 100755
--- a/python/setup.py.in
+++ b/python/setup.py.in
@@ -531,10 +531,8 @@ if '${WITH_ONNXRUNTIME}' == 'ON':
     shutil.copy('${PADDLE2ONNX_LIB}', libs_path)
     if os.name == 'nt':
         package_data['paddle.libs']+=['paddle2onnx.dll', 'onnxruntime.dll']
-    elif sys.platform == 'darwin':
-        package_data['paddle.libs']+=['libpaddle2onnx.dylib', 'libonnxruntime.1.10.0.dylib']
     else:
-        package_data['paddle.libs']+=['libpaddle2onnx.so', 'libonnxruntime.so.1.10.0']
+        package_data['paddle.libs']+=['${PADDLE2ONNX_LIB_NAME}', '${ONNXRUNTIME_LIB_NAME}']
 
 if '${WITH_XPU}' == 'ON':
     # only change rpath in Release mode,