diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 47c88c242e025c649e4327917cf2a793b90b9849..db2ffe11c6a3f1b9af4c1826dfbd27f7569fe804 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -520,4 +520,7 @@ if (NOT FOUND_MATCH)
 
     ADD_EXECUTABLE(test-conv-gpu operators/test_conv_gpu.cpp test_helper.h test_include.h)
     target_link_libraries(test-conv-gpu paddle-mobile)
+
+    ADD_EXECUTABLE(test-net-benchmark net/test_net_benchmark.cpp test_helper.h test_include.h)
+    target_link_libraries(test-net-benchmark paddle-mobile)
 endif ()
diff --git a/test/net/test_net_benchmark.cpp b/test/net/test_net_benchmark.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..f874683148e95180a5c1376e8d6a3233a2cabe1b
--- /dev/null
+++ b/test/net/test_net_benchmark.cpp
@@ -0,0 +1,59 @@
+/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include <iostream>
+#include "../test_helper.h"
+#include "../test_include.h"
+
+int main() {
+  paddle_mobile::PaddleMobile<paddle_mobile::CPU> paddle_mobile;
+  paddle_mobile.SetThreadNum(1);
+  auto time1 = paddle_mobile::time();
+
+  auto isok =
+      paddle_mobile.Load(std::string(g_yolo) + "/model",
+                         std::string(g_yolo) + "/params", true, false, 1, true);
+  if (isok) {
+    auto time2 = paddle_mobile::time();
+    std::cout << "load cost :" << paddle_mobile::time_diff(time1, time2) << "ms"
+              << std::endl;
+
+    std::vector<float> input;
+    std::vector<int64_t> dims{1, 3, 64, 64};
+    GetInput<float>(g_test_image_1x3x224x224_banana, &input, dims);
+
+    paddle_mobile::framework::DDim ddim =
+        paddle_mobile::framework::make_ddim(dims);
+    Tensor feed_tensor(input, paddle_mobile::framework::make_ddim(dims));
+
+    // warm up with 10 runs before timing
+    for (int i = 0; i < 10; ++i) {
+      // auto vec_result = paddle_mobile.Predict(input, dims);
+      paddle_mobile.Feed("data", feed_tensor);
+      paddle_mobile.Predict();
+    }
+    auto time3 = paddle_mobile::time();
+    for (int i = 0; i < 100; ++i) {
+      // auto vec_result = paddle_mobile.Predict(input, dims);
+      paddle_mobile.Feed("data", feed_tensor);
+      paddle_mobile.Predict();
+    }
+    auto time4 = paddle_mobile::time();
+    std::cout << "predict cost :"
+              << paddle_mobile::time_diff(time3, time4) / 100 << "ms"
+              << std::endl;
+  }
+
+  return 0;
+}