Unverified commit babba557, authored by: H heliqi, committed by: GitHub

[cherry pick 2.3][Inference] Fix the ort Backend multiple input bug (#43621 #43742) (#43739)

* cherry pick from develop 43621

* code format

* paddle2onnx update to 0.9.8
Parent: a7e0cdea
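The core of this fix is an ownership change for the staging buffers the ONNX Runtime backend copies input data into. Before the patch, each `Tensor` owned its own `std::vector<int8_t> buffer_`, so the memory an `Ort::Value` was bound to could die with its short-lived tensor handle before `Run()`; with a single input the handle typically stayed alive long enough, but with multiple inputs earlier bindings could dangle. The patch moves ownership into the predictor (`input_buffers_`, one entry per input name) and leaves only a `std::weak_ptr` in the tensor. Below is a minimal, self-contained sketch of that ownership pattern — `Predictor` and `TensorHandle` are hypothetical stand-ins, not the real Paddle classes:

```cpp
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <string>
#include <vector>

class TensorHandle {
 public:
  explicit TensorHandle(std::shared_ptr<std::vector<int8_t>> buf)
      : buffer_(buf) {}
  // Mirrors Tensor::ORTCopyFromCpu: stage data in the predictor-owned buffer.
  void CopyFromCpu(const float* data, size_t count) {
    auto buffer = buffer_.lock();  // non-null while the predictor is alive
    size_t bytes = count * sizeof(float);
    if (bytes > buffer->size()) buffer->resize(bytes);
    std::memcpy(buffer->data(), data, bytes);
  }

 private:
  // The handle may be destroyed long before Run(); it never owns the memory.
  std::weak_ptr<std::vector<int8_t>> buffer_;
};

class Predictor {
 public:
  TensorHandle GetInputTensor(const std::string& name) {
    auto& buf = input_buffers_[name];  // one buffer per input name
    if (!buf) buf = std::make_shared<std::vector<int8_t>>();
    return TensorHandle(buf);          // handle shares, never owns
  }

 private:
  // Buffers live as long as the predictor, so values bound to them stay
  // valid across multiple inputs until Run().
  std::map<std::string, std::shared_ptr<std::vector<int8_t>>> input_buffers_;
};

int main() {
  Predictor pred;
  std::vector<float> a(4, 1.f), b(8, 2.f);
  pred.GetInputTensor("x").CopyFromCpu(a.data(), a.size());
  pred.GetInputTensor("y").CopyFromCpu(b.data(), b.size());  // both stay valid
}
```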
@@ -12,53 +12,91 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-if (NOT WITH_ONNXRUNTIME)
+if(NOT WITH_ONNXRUNTIME)
   return()
-endif ()
+endif()
-if (WITH_ARM)
+if(WITH_ARM)
   message(SEND_ERROR "The current onnxruntime backend doesn't support ARM cpu")
   return()
-endif ()
+endif()
-INCLUDE(ExternalProject)
+include(ExternalProject)
 add_definitions(-DPADDLE_WITH_ONNXRUNTIME)
-SET(ONNXRUNTIME_PROJECT "extern_onnxruntime")
-SET(ONNXRUNTIME_PREFIX_DIR ${THIRD_PARTY_PATH}/onnxruntime)
-SET(ONNXRUNTIME_SOURCE_DIR ${THIRD_PARTY_PATH}/onnxruntime/src/${ONNXRUNTIME_PROJECT})
-SET(ONNXRUNTIME_INSTALL_DIR ${THIRD_PARTY_PATH}/install/onnxruntime)
-SET(ONNXRUNTIME_INC_DIR "${ONNXRUNTIME_INSTALL_DIR}/include" CACHE PATH "onnxruntime include directory." FORCE)
-SET(ONNXRUNTIME_LIB_DIR "${ONNXRUNTIME_INSTALL_DIR}/lib" CACHE PATH "onnxruntime lib directory." FORCE)
-SET(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${ONNXRUNTIME_LIB_DIR}")
+set(ONNXRUNTIME_PROJECT "extern_onnxruntime")
+set(ONNXRUNTIME_VERSION "1.10.0")
+set(ONNXRUNTIME_PREFIX_DIR ${THIRD_PARTY_PATH}/onnxruntime)
+set(ONNXRUNTIME_SOURCE_DIR
+    ${THIRD_PARTY_PATH}/onnxruntime/src/${ONNXRUNTIME_PROJECT})
+set(ONNXRUNTIME_INSTALL_DIR ${THIRD_PARTY_PATH}/install/onnxruntime)
+set(ONNXRUNTIME_INC_DIR
+    "${ONNXRUNTIME_INSTALL_DIR}/include"
+    CACHE PATH "onnxruntime include directory." FORCE)
+set(ONNXRUNTIME_LIB_DIR
+    "${ONNXRUNTIME_INSTALL_DIR}/lib"
+    CACHE PATH "onnxruntime lib directory." FORCE)
+set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${ONNXRUNTIME_LIB_DIR}")
-if (WIN32)
-  SET(ONNXRUNTIME_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.10.0/onnxruntime-win-x64-1.10.0.zip")
-elseif (APPLE)
-  SET(ONNXRUNTIME_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.10.0/onnxruntime-osx-x86_64-1.10.0.tgz")
-else ()
-  SET(ONNXRUNTIME_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.10.0/onnxruntime-linux-x64-1.10.0.tgz")
+if(WIN32)
+  set(ONNXRUNTIME_URL
+      "https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-win-x64-${ONNXRUNTIME_VERSION}.zip"
+  )
+elseif(APPLE)
+  set(ONNXRUNTIME_URL
+      "https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-osx-x86_64-${ONNXRUNTIME_VERSION}.tgz"
+  )
+else()
+  set(ONNXRUNTIME_URL
+      "https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-linux-x64-${ONNXRUNTIME_VERSION}.tgz"
+  )
 endif()
-INCLUDE_DIRECTORIES(${ONNXRUNTIME_INC_DIR}) # For ONNXRUNTIME code to include internal headers.
+# For ONNXRUNTIME code to include internal headers.
+include_directories(${ONNXRUNTIME_INC_DIR})
-if (WIN32)
-  SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.dll" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
-  SET(ONNXRUNTIME_SHARED_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.dll" CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
-  SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.lib" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
-elseif (APPLE)
-  SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/libonnxruntime.1.10.0.dylib" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
-  SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/libonnxruntime.1.10.0.dylib" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
-  SET(ONNXRUNTIME_SHARED_LIB ${ONNXRUNTIME_LIB} CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
-else ()
-  SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/libonnxruntime.so.1.10.0" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
-  SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/libonnxruntime.so.1.10.0" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
-  SET(ONNXRUNTIME_SHARED_LIB ${ONNXRUNTIME_LIB} CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
-endif ()
+set(ONNXRUNTIME_LIB_NEW_NAME "libonnxruntime${CMAKE_SHARED_LIBRARY_SUFFIX}")
+if(APPLE)
+  set(ONNXRUNTIME_LIB_NAME
+      "libonnxruntime.${ONNXRUNTIME_VERSION}${CMAKE_SHARED_LIBRARY_SUFFIX}")
+else()
+  set(ONNXRUNTIME_LIB_NAME
+      "libonnxruntime${CMAKE_SHARED_LIBRARY_SUFFIX}.${ONNXRUNTIME_VERSION}")
+endif()
+if(WIN32)
+  set(ONNXRUNTIME_SOURCE_LIB
+      "${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.dll"
+      CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
+  set(ONNXRUNTIME_SHARED_LIB
+      "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.dll"
+      CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
+  set(ONNXRUNTIME_LIB
+      "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.lib"
+      CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+elseif(APPLE)
+  set(ONNXRUNTIME_SOURCE_LIB
+      "${ONNXRUNTIME_SOURCE_DIR}/lib/${ONNXRUNTIME_LIB_NAME}"
+      CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
+  set(ONNXRUNTIME_LIB
+      "${ONNXRUNTIME_INSTALL_DIR}/lib/${ONNXRUNTIME_LIB_NAME}"
+      CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+  set(ONNXRUNTIME_SHARED_LIB
+      ${ONNXRUNTIME_LIB}
+      CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
+else()
+  set(ONNXRUNTIME_SOURCE_LIB
+      "${ONNXRUNTIME_SOURCE_DIR}/lib/${ONNXRUNTIME_LIB_NAME}"
+      CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
+  set(ONNXRUNTIME_LIB
+      "${ONNXRUNTIME_INSTALL_DIR}/lib/${ONNXRUNTIME_LIB_NAME}"
+      CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
+  set(ONNXRUNTIME_SHARED_LIB
+      ${ONNXRUNTIME_LIB}
+      CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
+endif()
-if (WIN32)
+if(WIN32)
   ExternalProject_Add(
     ${ONNXRUNTIME_PROJECT}
     ${EXTERNAL_PROJECT_LOG_ARGS}
@@ -68,12 +106,14 @@ if (WIN32)
     CONFIGURE_COMMAND ""
     BUILD_COMMAND ""
     UPDATE_COMMAND ""
-    INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB} ${ONNXRUNTIME_SHARED_LIB} &&
-                    ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.lib ${ONNXRUNTIME_LIB} &&
-                    ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include ${ONNXRUNTIME_INC_DIR}
-    BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB}
-  )
-else ()
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB}
+      ${ONNXRUNTIME_SHARED_LIB} && ${CMAKE_COMMAND} -E copy
+      ${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.lib ${ONNXRUNTIME_LIB} &&
+      ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include
+      ${ONNXRUNTIME_INC_DIR}
+    BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB})
+else()
   ExternalProject_Add(
     ${ONNXRUNTIME_PROJECT}
     ${EXTERNAL_PROJECT_LOG_ARGS}
@@ -83,12 +123,14 @@ else ()
     CONFIGURE_COMMAND ""
     BUILD_COMMAND ""
    UPDATE_COMMAND ""
-    INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB} ${ONNXRUNTIME_LIB} &&
-                    ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include ${ONNXRUNTIME_INC_DIR}
-    BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB}
-  )
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB} ${ONNXRUNTIME_LIB} &&
+      ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include
+      ${ONNXRUNTIME_INC_DIR} && ${CMAKE_COMMAND} -E create_symlink
+      ${ONNXRUNTIME_LIB_NAME} ${ONNXRUNTIME_LIB_DIR}/${ONNXRUNTIME_LIB_NEW_NAME}
+    BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB})
 endif()
-ADD_LIBRARY(onnxruntime STATIC IMPORTED GLOBAL)
-SET_PROPERTY(TARGET onnxruntime PROPERTY IMPORTED_LOCATION ${ONNXRUNTIME_LIB})
-ADD_DEPENDENCIES(onnxruntime ${ONNXRUNTIME_PROJECT})
+add_library(onnxruntime STATIC IMPORTED GLOBAL)
+set_property(TARGET onnxruntime PROPERTY IMPORTED_LOCATION ${ONNXRUNTIME_LIB})
+add_dependencies(onnxruntime ${ONNXRUNTIME_PROJECT})
@@ -16,50 +16,73 @@ if(NOT WITH_ONNXRUNTIME)
   return()
 endif()
-if (WITH_ARM)
+if(WITH_ARM)
   message(SEND_ERROR "The current onnxruntime backend doesn't support ARM cpu")
   return()
-endif ()
+endif()
-INCLUDE(ExternalProject)
+include(ExternalProject)
-SET(PADDLE2ONNX_PROJECT "extern_paddle2onnx")
-SET(PADDLE2ONNX_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle2onnx)
-SET(PADDLE2ONNX_INSTALL_DIR ${THIRD_PARTY_PATH}/install/paddle2onnx)
-SET(PADDLE2ONNX_SOURCE_DIR ${THIRD_PARTY_PATH}/paddle2onnx/src/${PADDLE2ONNX_PROJECT})
-SET(PADDLE2ONNX_INC_DIR "${PADDLE2ONNX_INSTALL_DIR}/include" CACHE PATH "paddle2onnx include directory." FORCE)
-SET(PADDLE2ONNX_LIB_DIR
+set(PADDLE2ONNX_PROJECT "extern_paddle2onnx")
+set(PADDLE2ONNX_VERSION "0.9.8")
+set(PADDLE2ONNX_PREFIX_DIR ${THIRD_PARTY_PATH}/paddle2onnx)
+set(PADDLE2ONNX_SOURCE_DIR
+    ${THIRD_PARTY_PATH}/paddle2onnx/src/${PADDLE2ONNX_PROJECT})
+set(PADDLE2ONNX_INSTALL_DIR ${THIRD_PARTY_PATH}/install/paddle2onnx)
+set(PADDLE2ONNX_INC_DIR
+    "${PADDLE2ONNX_INSTALL_DIR}/include"
+    CACHE PATH "paddle2onnx include directory." FORCE)
+set(PADDLE2ONNX_LIB_DIR
     "${PADDLE2ONNX_INSTALL_DIR}/lib"
     CACHE PATH "onnxruntime lib directory." FORCE)
-SET(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}")
+set(CMAKE_BUILD_RPATH "${CMAKE_BUILD_RPATH}" "${PADDLE2ONNX_LIB_DIR}")
-INCLUDE_DIRECTORIES(${PADDLE2ONNX_INC_DIR}) # For PADDLE2ONNX code to include internal headers.
+# For PADDLE2ONNX code to include internal headers.
+include_directories(${PADDLE2ONNX_INC_DIR})
+set(PADDLE2ONNX_LIB_NEW_NAME "libpaddle2onnx${CMAKE_SHARED_LIBRARY_SUFFIX}")
+if(APPLE)
+  set(PADDLE2ONNX_LIB_NAME
+      "libpaddle2onnx.${PADDLE2ONNX_VERSION}${CMAKE_SHARED_LIBRARY_SUFFIX}")
+else()
+  set(PADDLE2ONNX_LIB_NAME
+      "libpaddle2onnx${CMAKE_SHARED_LIBRARY_SUFFIX}.${PADDLE2ONNX_VERSION}")
+endif()
 if(WIN32)
-  SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.dll" CACHE FILEPATH "paddle2onnx library." FORCE)
-  SET(PADDLE2ONNX_COMPILE_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.lib" CACHE FILEPATH "paddle2onnx compile library." FORCE)
-elseif(APPLE)
-  SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.dylib" CACHE FILEPATH "PADDLE2ONNX library." FORCE)
-  SET(PADDLE2ONNX_COMPILE_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.dylib" CACHE FILEPATH "paddle2onnx compile library." FORCE)
+  set(PADDLE2ONNX_LIB
+      "${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.dll"
+      CACHE FILEPATH "paddle2onnx library." FORCE)
+  set(PADDLE2ONNX_COMPILE_LIB
+      "${PADDLE2ONNX_INSTALL_DIR}/lib/paddle2onnx.lib"
+      CACHE FILEPATH "paddle2onnx compile library." FORCE)
 else()
-  SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.so" CACHE FILEPATH "PADDLE2ONNX library." FORCE)
-  SET(PADDLE2ONNX_COMPILE_LIB "${PADDLE2ONNX_INSTALL_DIR}/lib/libpaddle2onnx.so" CACHE FILEPATH "PADDLE2ONNX library." FORCE)
-endif(WIN32)
+  set(PADDLE2ONNX_SOURCE_LIB
+      "${PADDLE2ONNX_SOURCE_DIR}/lib/${PADDLE2ONNX_LIB_NAME}"
+      CACHE FILEPATH "PADDLE2ONNX source library." FORCE)
+  set(PADDLE2ONNX_LIB
+      "${PADDLE2ONNX_LIB_DIR}/${PADDLE2ONNX_LIB_NAME}"
+      CACHE FILEPATH "PADDLE2ONNX library." FORCE)
+  set(PADDLE2ONNX_COMPILE_LIB
+      ${PADDLE2ONNX_LIB}
+      CACHE FILEPATH "paddle2onnx compile library." FORCE)
+endif()
 if(WIN32)
   set(PADDLE2ONNX_URL
-      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v0.9.7/paddle2onnx-win-x64-0.9.7.zip"
+      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v${PADDLE2ONNX_VERSION}/paddle2onnx-win-x64-${PADDLE2ONNX_VERSION}.zip"
   )
 elseif(APPLE)
   set(PADDLE2ONNX_URL
-      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v0.9.7/paddle2onnx-osx-x86_64-0.9.7.tgz"
+      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v${PADDLE2ONNX_VERSION}/paddle2onnx-osx-x86_64-${PADDLE2ONNX_VERSION}.tgz"
   )
 else()
   set(PADDLE2ONNX_URL
-      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v0.9.7/paddle2onnx-linux-x64-0.9.7.tgz"
+      "https://github.com/PaddlePaddle/Paddle2ONNX/releases/download/v${PADDLE2ONNX_VERSION}/paddle2onnx-linux-x64-${PADDLE2ONNX_VERSION}.tgz"
   )
 endif()
+if(WIN32)
-ExternalProject_Add(
+  ExternalProject_Add(
     ${PADDLE2ONNX_PROJECT}
     ${EXTERNAL_PROJECT_LOG_ARGS}
     URL ${PADDLE2ONNX_URL}
@@ -73,6 +96,24 @@ ExternalProject_Add(
       ${PADDLE2ONNX_LIB_DIR} && ${CMAKE_COMMAND} -E copy_directory
       ${PADDLE2ONNX_SOURCE_DIR}/include ${PADDLE2ONNX_INC_DIR}
     BUILD_BYPRODUCTS ${PADDLE2ONNX_COMPILE_LIB})
+else()
+  ExternalProject_Add(
+    ${PADDLE2ONNX_PROJECT}
+    ${EXTERNAL_PROJECT_LOG_ARGS}
+    URL ${PADDLE2ONNX_URL}
+    PREFIX ${PADDLE2ONNX_PREFIX_DIR}
+    DOWNLOAD_NO_PROGRESS 1
+    CONFIGURE_COMMAND ""
+    BUILD_COMMAND ""
+    UPDATE_COMMAND ""
+    INSTALL_COMMAND
+      ${CMAKE_COMMAND} -E copy ${PADDLE2ONNX_SOURCE_LIB}
+      ${PADDLE2ONNX_COMPILE_LIB} && ${CMAKE_COMMAND} -E copy_directory
+      ${PADDLE2ONNX_SOURCE_DIR}/include ${PADDLE2ONNX_INC_DIR} &&
+      ${CMAKE_COMMAND} -E create_symlink ${PADDLE2ONNX_LIB_NAME}
+      ${PADDLE2ONNX_LIB_DIR}/${PADDLE2ONNX_LIB_NEW_NAME}
+    BUILD_BYPRODUCTS ${PADDLE2ONNX_COMPILE_LIB})
+endif()
 add_library(paddle2onnx STATIC IMPORTED GLOBAL)
 set_property(TARGET paddle2onnx PROPERTY IMPORTED_LOCATION
......
@@ -162,13 +162,7 @@ else()
 endif()
 if (WITH_ONNXRUNTIME)
-  if(WIN32)
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/onnxruntime.lib paddle2onnx)
-  elseif(APPLE)
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/libonnxruntime.1.10.0.dylib paddle2onnx)
-  else()
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/libonnxruntime.so.1.10.0 paddle2onnx)
-  endif()
+  set(DEPS ${DEPS} onnxruntime paddle2onnx)
 endif()
......
@@ -13,15 +13,19 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 /*
- * This file contains demo of mobilenet for tensorrt.
+ * This file contains demo of mobilenet for onnxruntime backend.
  */
 #include <glog/logging.h>  // use glog instead of CHECK to avoid importing other paddle header files.
+
+#include <algorithm>
+#include <numeric>
+#include <vector>
+
 #include "gflags/gflags.h"
 #include "utils.h"  // NOLINT
 DEFINE_string(modeldir, "", "Directory of the inference model.");
+DEFINE_string(data, "", "path of data");
 namespace paddle {
 namespace demo {
@@ -37,8 +41,21 @@ void Main() {
   auto predictor = paddle_infer::CreatePredictor(config);
   // Inference.
   LOG(INFO) << "--- prepare input data ----";
   std::vector<int> input_shape = {1, 3, 224, 224};
-  std::vector<float> input_data(1 * 3 * 224 * 224, 1.0);
+  std::vector<float> input_data;
+  std::string line;
+  std::ifstream file(FLAGS_data);
+  std::getline(file, line);
+  file.close();
+  std::vector<std::string> data_strs;
+  split(line, ' ', &data_strs);
+  int input_num = 0;
+  for (auto& d : data_strs) {
+    input_num += 1;
+    input_data.push_back(std::stof(d));
+  }
+
   std::vector<float> out_data;
   out_data.resize(1000);
   auto input_names = predictor->GetInputNames();
@@ -51,7 +68,19 @@ void Main() {
   predictor->Run();
   output_tensor->CopyToCpu(out_data.data());
-  VLOG(3) << "output.size " << out_data.size();
+  std::vector<int> out_index(out_data.size());
+  std::iota(out_index.begin(), out_index.end(), 0);
+  std::sort(out_index.begin(), out_index.end(),
+            [&out_data](int index1, int index2) {
+              return out_data[index1] > out_data[index2];
+            });
+  LOG(INFO) << "output.size " << out_data.size()
+            << " max_index:" << out_index[0];
+  CHECK_EQ(out_data.size(), 1000);
+  int max_index = out_index[0];
+  CHECK_EQ(max_index, 13);
+  float max_score = out_data[max_index];
+  CHECK_LE(fabs(max_score - 0.99981), 1e-4);
 }
 }  // namespace demo
......
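Since the check above only consumes the top-1 index and score, the `iota` plus full `sort` could in principle be a single `std::max_element` scan. A hypothetical simplification, shown standalone with the expected values from the diff's CHECKs:

```cpp
#include <algorithm>
#include <cassert>
#include <cmath>
#include <iterator>
#include <vector>

int main() {
  // out_data stands in for the demo's 1000-class output buffer; the top-1
  // class id (13) and score (0.99981) are the values the diff asserts.
  std::vector<float> out_data(1000, 0.f);
  out_data[13] = 0.99981f;
  auto max_it = std::max_element(out_data.begin(), out_data.end());
  int max_index = static_cast<int>(std::distance(out_data.begin(), max_it));
  assert(max_index == 13);
  assert(std::fabs(*max_it - 0.99981f) < 1e-4f);
  return 0;
}
```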
@@ -52,15 +52,17 @@ if [ $7 == ON ]; then
     mkdir -p MobileNetV2
     cd MobileNetV2
     if [[ -e "MobileNetV2.inference.model.tar.gz" ]]; then
-        echo "MobileNetV2.inference.model.tar.gz has been downloaded."
-    else
+        rm -rf MobileNetV2.inference.model.tar.gz
+    fi
+    # echo "MobileNetV2.inference.model.tar.gz has been downloaded."
+    # else
     if [ $WIN_DETECT != "" ]; then
         wget -q -Y off http://paddle-inference-dist.bj.bcebos.com/MobileNetV2.inference.model.tar.gz
     else
         wget -q --no-proxy http://paddle-inference-dist.bj.bcebos.com/MobileNetV2.inference.model.tar.gz
     fi
     tar xzf *.tar.gz
-    fi
+    # fi
     cd ..
 fi
@@ -265,7 +267,8 @@ for WITH_STATIC_LIB in ON OFF; do
               -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME
       make -j$(nproc)
       ./onnxruntime_mobilenet_demo \
-          --modeldir=$DATA_DIR/MobileNetV2/MobileNetV2
+          --modeldir=$DATA_DIR/MobileNetV2/MobileNetV2 \
+          --data=$DATA_DIR/MobileNetV2/MobileNetV2/data.txt
       if [ $? -ne 0 ]; then
           echo "onnxruntime_mobilenet_demo runs failed " >> ${current_dir}/test_summary.txt
           EXIT_CODE=1
@@ -626,6 +626,10 @@ void Tensor::SetOrtBinding(const std::shared_ptr<Ort::IoBinding> binding) {
   binding_ = binding;
 }
+void Tensor::SetOrtBuffer(const std::shared_ptr<std::vector<int8_t>> buffer) {
+  buffer_ = buffer;
+}
+
 Ort::Value GetOrtVaule(const Ort::MemoryInfo &memory_info, float *data,
                        size_t size, const int64_t *shape, size_t shape_len) {
   return Ort::Value::CreateTensor<float>(memory_info, data, size, shape,
@@ -674,11 +678,12 @@ void Tensor::ORTCopyFromCpu(const T *data) {
                              OrtMemTypeDefault);
   size_t size = std::accumulate(begin(shape_), end(shape_), 1UL,
                                 std::multiplies<size_t>());
+  auto buffer = buffer_.lock();
   size_t buffer_size = size * sizeof(T);
-  if (buffer_size > buffer_.size()) {
-    buffer_.resize(buffer_size);
+  if (buffer_size > buffer->size()) {
+    buffer->resize(buffer_size);
   }
-  std::memcpy(static_cast<void *>(buffer_.data()), data, buffer_size);
+  std::memcpy(static_cast<void *>(buffer->data()), data, buffer_size);
   auto onnx_dtype = ONNX_TENSOR_ELEMENT_DATA_TYPE_UNDEFINED;
   if (std::is_same<T, float>::value) {
@@ -695,16 +700,14 @@ void Tensor::ORTCopyFromCpu(const T *data) {
     onnx_dtype = ONNX_TENSOR_ELEMENT_DATA_TYPE_INT8;
   } else if (std::is_same<T, float16>::value) {
     onnx_dtype = ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT16;
-  }
-
-  if (onnx_dtype == ONNX_TENSOR_ELEMENT_DATA_TYPE_UNDEFINED) {
+  } else {
     PADDLE_THROW(paddle::platform::errors::InvalidArgument(
         "Found undefined data type for onnxruntime, only supports "
         "float16/float32/float64/int8/uint8/int32/int64."));
   }
   auto ort_value =
-      Ort::Value::CreateTensor(memory_info, buffer_.data(), buffer_size,
+      Ort::Value::CreateTensor(memory_info, buffer->data(), buffer_size,
                                shape_.data(), shape_.size(), onnx_dtype);
   binding->BindInput(name_.c_str(), ort_value);
......
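The `buffer_.lock()` call above promotes the tensor's `std::weak_ptr` to a `std::shared_ptr` for the duration of the copy; the owning `shared_ptr` lives in the predictor's `input_buffers_`, so the result is non-null whenever the handle came from a live predictor. A small standalone sketch of the same idiom, with an explicit null check added (the check is our addition; the patch omits it because the predictor outlives every handle it returns):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <memory>
#include <stdexcept>
#include <vector>

// Promote the weak_ptr before touching the buffer; fail loudly if the
// owning predictor has already been destroyed.
void StageBytes(std::weak_ptr<std::vector<int8_t>> weak, const int8_t* src,
                std::size_t n) {
  auto buffer = weak.lock();
  if (!buffer) throw std::runtime_error("owning predictor was destroyed");
  if (n > buffer->size()) buffer->resize(n);
  std::memcpy(buffer->data(), src, n);
}
```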
@@ -254,6 +254,14 @@ std::unique_ptr<ZeroCopyTensor> ONNXRuntimePredictor::GetInputTensor(
   }
   res->SetOrtMark(true);
   res->SetOrtBinding(binding_);
+  auto iter = input_buffers_.find(name);
+  if (iter == input_buffers_.end()) {
+    std::vector<int8_t> i_vector;
+    input_buffers_[name] = std::make_shared<std::vector<int8_t>>(i_vector);
+    res->SetOrtBuffer(input_buffers_[name]);
+  } else {
+    res->SetOrtBuffer(iter->second);
+  }
   return res;
 }
......
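The find/insert pair above lazily creates one buffer per input name and reuses it on later calls, which is what keeps repeated `GetInputTensor` calls for the same input pointing at the same storage. Under C++17 the same logic could collapse into one `try_emplace` lookup — a possible simplification, not something this patch does:

```cpp
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include <vector>

using Buffer = std::shared_ptr<std::vector<int8_t>>;

// C++17 alternative to find() + operator[]: a single map lookup that only
// inserts when the key is missing. (Note the make_shared argument is still
// evaluated even when no insertion happens.)
Buffer GetOrCreateBuffer(std::map<std::string, Buffer>& buffers,
                         const std::string& name) {
  auto [iter, inserted] =
      buffers.try_emplace(name, std::make_shared<std::vector<int8_t>>());
  return iter->second;
}
```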
@@ -199,6 +199,7 @@ class ONNXRuntimePredictor : public PaddlePredictor {
   platform::Place place_;
   std::vector<ONNXDesc> input_desc_;
   std::vector<ONNXDesc> output_desc_;
+  std::map<std::string, std::shared_ptr<std::vector<int8_t>>> input_buffers_;
   int predictor_id_;
 // Some more detailed tests, they are made the friends of the predictor, so that
......
@@ -183,7 +183,7 @@ class PD_INFER_DECL Tensor {
 #ifdef PADDLE_WITH_ONNXRUNTIME
   bool is_ort_tensor_{false};
   std::vector<int64_t> shape_;
-  std::vector<int8_t> buffer_;
+  std::weak_ptr<std::vector<int8_t>> buffer_;
   std::weak_ptr<Ort::IoBinding> binding_;
   int idx_{-1};
@@ -191,6 +191,8 @@ class PD_INFER_DECL Tensor {
   void SetOrtBinding(const std::shared_ptr<Ort::IoBinding> binding);
+  void SetOrtBuffer(const std::shared_ptr<std::vector<int8_t>> buffer);
+
   template <typename T>
   void ORTCopyFromCpu(const T* data);
......
@@ -183,13 +183,7 @@ else()
 endif()
 if (WITH_ONNXRUNTIME)
-  if(WIN32)
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/onnxruntime.lib paddle2onnx)
-  elseif(APPLE)
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/libonnxruntime.1.10.0.dylib paddle2onnx)
-  else()
-    set(DEPS ${DEPS} ${PADDLE_LIB_THIRD_PARTY_PATH}onnxruntime/lib/libonnxruntime.so.1.10.0 paddle2onnx)
-  endif()
+  set(DEPS ${DEPS} onnxruntime paddle2onnx)
 endif()
 if (NOT WIN32)
......
@@ -286,13 +286,10 @@ if(WITH_PYTHON)
     # LD_LIBRARY_PATH. This is different with Windows platformm, which search
     # *.dll in current directory automatically.
     if(WITH_ONNXRUNTIME)
-      if (APPLE)
-        set(PADDLE2ONNX_PYBIND_OUT ${CMAKE_CURRENT_BINARY_DIR}/libpaddle2onnx.dylib)
-        set(ONNXRUNTIME_PYBIND_OUT ${CMAKE_CURRENT_BINARY_DIR}/libonnxruntime.dylib)
-      else()
-        set(PADDLE2ONNX_PYBIND_OUT ${CMAKE_CURRENT_BINARY_DIR}/libpaddle2onnx.so)
-        set(ONNXRUNTIME_PYBIND_OUT ${CMAKE_CURRENT_BINARY_DIR}/libonnxruntime.so)
-      endif()
+      set(PADDLE2ONNX_PYBIND_OUT
+          ${CMAKE_CURRENT_BINARY_DIR}/${PADDLE2ONNX_LIB_NAME})
+      set(ONNXRUNTIME_PYBIND_OUT
+          ${CMAKE_CURRENT_BINARY_DIR}/${ONNXRUNTIME_LIB_NAME})
       ADD_CUSTOM_COMMAND(OUTPUT ${PADDLE2ONNX_PYBIND_OUT}
         COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE2ONNX_LIB} ${CMAKE_CURRENT_BINARY_DIR}
......
@@ -531,10 +531,8 @@ if '${WITH_ONNXRUNTIME}' == 'ON':
     shutil.copy('${PADDLE2ONNX_LIB}', libs_path)
     if os.name == 'nt':
         package_data['paddle.libs']+=['paddle2onnx.dll', 'onnxruntime.dll']
-    elif sys.platform == 'darwin':
-        package_data['paddle.libs']+=['libpaddle2onnx.dylib', 'libonnxruntime.1.10.0.dylib']
     else:
-        package_data['paddle.libs']+=['libpaddle2onnx.so', 'libonnxruntime.so.1.10.0']
+        package_data['paddle.libs']+=['${PADDLE2ONNX_LIB_NAME}', '${ONNXRUNTIME_LIB_NAME}']
 if '${WITH_XPU}' == 'ON':
     # only change rpath in Release mode,
......