From 16761c1504a640a60308e7b13cde840f3d599911 Mon Sep 17 00:00:00 2001
From: yangfei
Date: Mon, 12 Nov 2018 19:08:30 +0800
Subject: [PATCH] add CPU and GPU predict time functions

---
 src/io/api_paddle_mobile.cc          |  5 ----
 src/io/api_paddle_mobile.h           |  2 --
 src/io/paddle_inference_api.h        |  1 -
 src/io/paddle_test_inference_api.cpp | 34 ++++++++++++++++++++++++++++
 src/io/paddle_test_inference_api.h   | 34 ++++++++++++++++++++++++++++
 test/net/test_yologpu.cpp            | 16 ++++++-----
 6 files changed, 78 insertions(+), 14 deletions(-)
 create mode 100644 src/io/paddle_test_inference_api.cpp
 create mode 100644 src/io/paddle_test_inference_api.h

diff --git a/src/io/api_paddle_mobile.cc b/src/io/api_paddle_mobile.cc
index c5da3993d1..8088f0b8c9 100644
--- a/src/io/api_paddle_mobile.cc
+++ b/src/io/api_paddle_mobile.cc
@@ -52,11 +52,6 @@ bool PaddleMobilePredictor<Dtype, P>::Init(const PaddleMobileConfig &config) {
   paddle_mobile_->SetThreadNum(config.thread_num);
   return true;
 }
-template <typename Dtype, Precision P>
-double PaddleMobilePredictor<Dtype, P>::CaculatePredictTime() {
-  return paddle_mobile_->GetPredictTime();
-};
-
 template <typename Dtype, Precision P>
 bool PaddleMobilePredictor<Dtype, P>::Run(
     const std::vector<PaddleTensor> &inputs,
diff --git a/src/io/api_paddle_mobile.h b/src/io/api_paddle_mobile.h
index d8e5f856c6..bdeb7e1865 100644
--- a/src/io/api_paddle_mobile.h
+++ b/src/io/api_paddle_mobile.h
@@ -40,8 +40,6 @@ class PaddleMobilePredictor : public PaddlePredictor {
            std::vector<PaddleTensor>* output_data,
            int batch_size = -1) override;
 
-  double CaculatePredictTime() override;
-
   ~PaddleMobilePredictor() override;
 
  private:
diff --git a/src/io/paddle_inference_api.h b/src/io/paddle_inference_api.h
index 33a166f2c5..5326f864a4 100644
--- a/src/io/paddle_inference_api.h
+++ b/src/io/paddle_inference_api.h
@@ -98,7 +98,6 @@ class PaddlePredictor {
   virtual bool Run(const std::vector<PaddleTensor>& inputs,
                    std::vector<PaddleTensor>* output_data,
                    int batch_size = -1) = 0;
-  virtual double CaculatePredictTime() = 0;
 
   // Destroy the Predictor.
   virtual ~PaddlePredictor() = default;
diff --git a/src/io/paddle_test_inference_api.cpp b/src/io/paddle_test_inference_api.cpp
new file mode 100644
index 0000000000..1d69856653
--- /dev/null
+++ b/src/io/paddle_test_inference_api.cpp
@@ -0,0 +1,34 @@
+/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include "io/paddle_test_inference_api.h"
+namespace paddle_mobile {
+template <typename Dtype, Precision P>
+double PaddleTester<Dtype, P>::CaculatePredictTime(std::string *cl_path) {
+  PaddleMobile<Dtype, P> paddle_mobile;
+#ifdef PADDLE_MOBILE_CL
+  if (cl_path) {
+    paddle_mobile.SetCLPath(*cl_path);
+  }
+
+#endif
+  return paddle_mobile.GetPredictTime();
+}
+template class PaddleTester<CPU, Precision::FP32>;
+template class PaddleTester<FPGA, Precision::FP32>;
+template class PaddleTester<GPU_MALI, Precision::FP32>;
+
+template class PaddleTester<GPU_CL, Precision::FP32>;
+
+}  // namespace paddle_mobile
diff --git a/src/io/paddle_test_inference_api.h b/src/io/paddle_test_inference_api.h
new file mode 100644
index 0000000000..528647aae0
--- /dev/null
+++ b/src/io/paddle_test_inference_api.h
@@ -0,0 +1,34 @@
+/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+/*
+ * This file contains the definition of a simple Inference API for Paddle.
+ *
+ * ATTENTION: It requires some C++ features; for lower-version C++ or C, we
+ * might release another API.
+ */
+
+#pragma once
+#include "common/types.h"
+#include "io/paddle_mobile.h"
+#include <string>
+
+namespace paddle_mobile {
+template <typename Dtype, Precision P = Precision::FP32>
+class PaddleTester {
+ public:
+  double CaculatePredictTime(std::string *cl_path = nullptr);
+};
+
+}  // namespace paddle_mobile
diff --git a/test/net/test_yologpu.cpp b/test/net/test_yologpu.cpp
index e77861caba..0215ded59e 100644
--- a/test/net/test_yologpu.cpp
+++ b/test/net/test_yologpu.cpp
@@ -15,17 +15,21 @@ limitations under the License. */
 #include <iostream>
 #include <thread>
 #include "../../src/common/types.h"
+#include "../../src/io/paddle_test_inference_api.h"
 #include "../test_helper.h"
 #include "../test_include.h"
 void t1() {
   paddle_mobile::PaddleMobile<paddle_mobile::GPU_CL> paddle_mobile_gpu;
   paddle_mobile::PaddleMobile<paddle_mobile::CPU> paddle_mobile_cpu;
+  paddle_mobile::PaddleTester<paddle_mobile::CPU> paddle_test_cpu;
+  paddle_mobile::PaddleTester<paddle_mobile::GPU_CL> paddle_test_gpu;
+  printf("cpu time:%f\n", paddle_test_cpu.CaculatePredictTime());
+  std::string path = "/data/local/tmp/bin";
+  printf("gpu time:%f\n", paddle_test_gpu.CaculatePredictTime(&path));
   // paddle_mobile.SetThreadNum(4);
 #ifdef PADDLE_MOBILE_CL
   paddle_mobile_gpu.SetCLPath("/data/local/tmp/bin");
 #endif
-  printf("cpu time:%f\n", paddle_mobile_cpu.GetPredictTime());
-  printf("gpu time:%f\n", paddle_mobile_gpu.GetPredictTime());
   auto time1 = paddle_mobile::time();
   auto isok = paddle_mobile_gpu.Load(std::string(g_yolo_mul) + "/model",
                                      std::string(g_yolo_mul) + "/params", true);
@@ -175,11 +179,11 @@ void t3() {
 int main() {
   // std::thread th1(t1);
   // std::thread th2(t2);
-  std::thread th3(t3);
-  // std::thread th1(t1);
+  // std::thread th3(t3);
+  std::thread th1(t1);
   // th1.join();
   // th2.join();
-  th3.join();
-  // th1.join();
+  // th3.join();
+  th1.join();
   return 0;
 }
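Usage note: a minimal standalone sketch of how the new PaddleTester API is called, distilled from the t1() changes in test_yologpu.cpp above. The variable names tester_cpu, tester_gpu, and cl_path are illustrative only; relying on a single template argument assumes the Precision::FP32 default reconstructed above, and "/data/local/tmp/bin" is the device-specific OpenCL binary directory the test happens to use.

#include <cstdio>
#include <string>
#include "io/paddle_test_inference_api.h"

int main() {
  // CPU timing needs no extra setup: construct a tester for the CPU
  // device tag and ask for the benchmark predict time.
  paddle_mobile::PaddleTester<paddle_mobile::CPU> tester_cpu;
  printf("cpu time:%f\n", tester_cpu.CaculatePredictTime());

  // The GPU path must know where the precompiled OpenCL kernel binaries
  // live; pass that directory through the optional cl_path argument.
  paddle_mobile::PaddleTester<paddle_mobile::GPU_CL> tester_gpu;
  std::string cl_path = "/data/local/tmp/bin";  // assumed device path
  printf("gpu time:%f\n", tester_gpu.CaculatePredictTime(&cl_path));
  return 0;
}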