Unverified commit fda4d42c, authored by Jiaying Zhao, committed by GitHub

add GetExceptionMsg for paddle_inference_api. test=develop (#2231)

Parent 305130fc
@@ -111,10 +111,14 @@ bool PaddleMobilePredictor<Device, T>::Run(
     if (input.dtype == UINT8) {
       framework::Tensor input_tensor(static_cast<uint8_t *>(input.data.data()),
                                      ddim);
-      paddle_mobile_->Predict(input_tensor);
+      if (paddle_mobile_->Predict(input_tensor) != PMStatus::PMSuccess) {
+        return false;
+      }
     } else {
       framework::Tensor input_tensor(static_cast<T *>(input.data.data()), ddim);
-      paddle_mobile_->Predict(input_tensor);
+      if (paddle_mobile_->Predict(input_tensor) != PMStatus::PMSuccess) {
+        return false;
+      }
     }
   }
@@ -153,6 +157,11 @@ bool PaddleMobilePredictor<Device, T>::Run(
   return true;
 }
 
+template <typename Device, typename T>
+std::string PaddleMobilePredictor<Device, T>::GetExceptionMsg() {
+  return paddle_mobile_->GetExceptionMsg();
+}
+
 #ifdef PADDLE_MOBILE_FPGA
 void ConvertPaddleTensors(const PaddleTensor &src, framework::Tensor *des) {
   des->Resize(framework::make_ddim(src.shape));
......
@@ -32,6 +32,7 @@ class PaddleMobilePredictor : public PaddlePredictor {
   bool Run(const std::vector<PaddleTensor>& inputs,
            std::vector<PaddleTensor>* output_data,
            int batch_size = -1) override;
+  std::string GetExceptionMsg();
 #ifdef PADDLE_MOBILE_FPGA
   void Predict_From_To(int start, int end) override;
   void FeedPaddleTensors(const std::vector<PaddleTensor>& inputs) override;
......
@@ -174,6 +174,7 @@ class PaddlePredictor {
   virtual bool Run(const std::vector<PaddleTensor>& inputs,
                    std::vector<PaddleTensor>* output_data,
                    int batch_size = -1) = 0;
+  virtual std::string GetExceptionMsg() { return ""; }
 
   // Destroy the Predictor.
   virtual ~PaddlePredictor() = default;
......
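
A hedged caller-side sketch, not part of this commit, of how the new hook is meant to be used: Run() reports failure through its bool return value, and GetExceptionMsg() exposes the message that paddle-mobile recorded for that failure (or an empty string for predictors that keep the default base-class implementation). The include path, the RunWithDiagnostics helper name, and the omitted namespace qualification are assumptions for illustration; only PaddlePredictor, PaddleTensor, Run, and GetExceptionMsg come from the API shown above.

// Hypothetical caller-side sketch; the include path is an assumption and
// namespace qualification (if any) is omitted for brevity.
#include <iostream>
#include <string>
#include <vector>

#include "paddle_inference_api.h"

// Runs one batch and surfaces the exception message when inference fails.
bool RunWithDiagnostics(PaddlePredictor* predictor,
                        const std::vector<PaddleTensor>& inputs,
                        std::vector<PaddleTensor>* outputs) {
  if (!predictor->Run(inputs, outputs)) {
    // With this commit, Run() returns false when Predict() does not report
    // PMSuccess; GetExceptionMsg() returns whatever message paddle-mobile
    // recorded for the failure.
    std::cerr << "inference failed: " << predictor->GetExceptionMsg()
              << std::endl;
    return false;
  }
  return true;
}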
@@ -540,6 +540,12 @@ else()
     ADD_EXECUTABLE(test-net net/test_net.cpp test_helper.h test_include.h executor_for_test.h)
     target_link_libraries(test-net paddle-mobile)
 
     ADD_EXECUTABLE(test-super net/test_super.cpp test_helper.h test_include.h executor_for_test.h)
     target_link_libraries(test-super paddle-mobile)
 
+    ADD_EXECUTABLE(test-inference-pre-post net/test_inference_pre_post.cpp)
+    target_link_libraries(test-inference-pre-post paddle-mobile)
+
+    ADD_EXECUTABLE(test-inference-super net/test_inference_super.cpp)
+    target_link_libraries(test-inference-super paddle-mobile)
 endif()