diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt
index f96c920d285b5b6a500cca9ea58e360777bde393..548ef382576a968e32e99b0d351276c486a06bbd 100644
--- a/paddle/fluid/inference/tests/api/CMakeLists.txt
+++ b/paddle/fluid/inference/tests/api/CMakeLists.txt
@@ -243,7 +243,13 @@ if(WITH_GPU AND TENSORRT_FOUND)
   if (NOT EXISTS ${TRT_MODEL_INSTALL_DIR})
     inference_download_and_uncompress(${TRT_MODEL_INSTALL_DIR} ${INFERENCE_URL}/tensorrt_test "trt_test_models.tar.gz")
   endif()
-  inference_analysis_test(test_trt_models SRCS trt_models_tester.cc
+  inference_analysis_test(trt_mobilenet_test SRCS trt_mobilenet_test.cc
+          EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
+          ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_test_models)
+  inference_analysis_test(trt_resnet50_test SRCS trt_resnet50_test.cc
+          EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
+          ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_test_models)
+  inference_analysis_test(trt_resnext_test SRCS trt_resnext_test.cc
           EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
           ARGS --infer_model=${TRT_MODEL_INSTALL_DIR}/trt_test_models)
 endif()
diff --git a/paddle/fluid/inference/tests/api/tester_helper.h b/paddle/fluid/inference/tests/api/tester_helper.h
index a50810948ff8cb9e0bb92c287a7ab3945d39e089..6dda9ed0ec673baf455be9665a0ec0c63ccb3909 100644
--- a/paddle/fluid/inference/tests/api/tester_helper.h
+++ b/paddle/fluid/inference/tests/api/tester_helper.h
@@ -148,7 +148,7 @@ void CompareResult(const std::vector<PaddleTensor> &outputs,
       case PaddleDType::INT64: {
         int64_t *pdata = static_cast<int64_t *>(out.data.data());
         int64_t *pdata_ref = ref_out.data<int64_t>(&place, &ref_size);
-        EXPECT_EQ(size, ref_size);
+        EXPECT_EQ(size, static_cast<size_t>(ref_size));
         for (size_t j = 0; j < size; ++j) {
           EXPECT_EQ(pdata_ref[j], pdata[j]);
         }
diff --git a/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc b/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..14539a9d4e94b8a5735fe519587a78ded8193258
--- /dev/null
+++ b/paddle/fluid/inference/tests/api/trt_mobilenet_test.cc
@@ -0,0 +1,49 @@
+/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include <gflags/gflags.h>
+#include <glog/logging.h>
+#include <gtest/gtest.h>
+
+#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+
+namespace paddle {
+namespace inference {
+
+TEST(TensorRT_mobilenet, compare) {
+  std::string model_dir = FLAGS_infer_model + "/mobilenet";
+  compare(model_dir, /* use_tensorrt */ true);
+  // Open it when need.
+  // profile(model_dir, /* use_analysis */ true, FLAGS_use_tensorrt);
+}
+
+TEST(AnalysisPredictor, use_gpu) {
+  std::string model_dir = FLAGS_infer_model + "/" + "mobilenet";
+  AnalysisConfig config;
+  config.EnableUseGpu(100, 0);
+  config.SetModel(model_dir);
+  config.pass_builder()->TurnOnDebug();
+
+  std::vector<std::vector<PaddleTensor>> inputs_all;
+  auto predictor = CreatePaddlePredictor(config);
+  SetFakeImageInput(&inputs_all, model_dir, false, "__model__", "");
+
+  std::vector<PaddleTensor> outputs;
+  for (auto& input : inputs_all) {
+    ASSERT_TRUE(predictor->Run(input, &outputs));
+  }
+}
+
+}  // namespace inference
+}  // namespace paddle
diff --git a/paddle/fluid/inference/tests/api/trt_resnet50_test.cc b/paddle/fluid/inference/tests/api/trt_resnet50_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..7dfcbb0d0d8a66f9159d7c63ea50cb59bee7b460
--- /dev/null
+++ b/paddle/fluid/inference/tests/api/trt_resnet50_test.cc
@@ -0,0 +1,30 @@
+/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include <gflags/gflags.h>
+#include <glog/logging.h>
+#include <gtest/gtest.h>
+
+#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+
+namespace paddle {
+namespace inference {
+
+TEST(resnet50, compare_continuous_input) {
+  std::string model_dir = FLAGS_infer_model + "/resnet50";
+  compare_continuous_input(model_dir, true);
+}
+
+}  // namespace inference
+}  // namespace paddle
diff --git a/paddle/fluid/inference/tests/api/trt_resnext_test.cc b/paddle/fluid/inference/tests/api/trt_resnext_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..588b5bffd74e540aa9f3bbdd4f5496c520290aa4
--- /dev/null
+++ b/paddle/fluid/inference/tests/api/trt_resnext_test.cc
@@ -0,0 +1,30 @@
+/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include <gflags/gflags.h>
+#include <glog/logging.h>
+#include <gtest/gtest.h>
+
+#include "paddle/fluid/inference/tests/api/trt_test_helper.h"
+
+namespace paddle {
+namespace inference {
+
+TEST(TensorRT_resnext50, compare) {
+  std::string model_dir = FLAGS_infer_model + "/resnext50";
+  compare(model_dir, /* use_tensorrt */ true);
+}
+
+}  // namespace inference
+}  // namespace paddle
diff --git a/paddle/fluid/inference/tests/api/trt_models_tester.cc b/paddle/fluid/inference/tests/api/trt_test_helper.h
similarity index 81%
rename from paddle/fluid/inference/tests/api/trt_models_tester.cc
rename to paddle/fluid/inference/tests/api/trt_test_helper.h
index ec10e36c3b3707a88eebe116aaf3de454fc199b5..0233cad0a65e9b1a8d0b54fd53660602b79c06cb 100644
--- a/paddle/fluid/inference/tests/api/trt_models_tester.cc
+++ b/paddle/fluid/inference/tests/api/trt_test_helper.h
@@ -11,10 +11,13 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License. */
+#pragma once
+#include <string>
+#include <vector>
 
-#include <gflags/gflags.h>
-#include <glog/logging.h>
-#include <gtest/gtest.h>
+#include "gflags/gflags.h"
+#include "glog/logging.h"
+#include "gtest/gtest.h"
 
 #include "paddle/fluid/inference/tests/api/tester_helper.h"
 
@@ -130,39 +133,5 @@ void compare_continuous_input(std::string model_dir, bool use_tensorrt) {
   }
 }
 
-TEST(TensorRT_mobilenet, compare) {
-  std::string model_dir = FLAGS_infer_model + "/mobilenet";
-  compare(model_dir, /* use_tensorrt */ true);
-  // Open it when need.
-  // profile(model_dir, /* use_analysis */ true, FLAGS_use_tensorrt);
-}
-
-TEST(resnet50, compare_continuous_input) {
-  std::string model_dir = FLAGS_infer_model + "/resnet50";
-  compare_continuous_input(model_dir, true);
-}
-
-TEST(TensorRT_resnext50, compare) {
-  std::string model_dir = FLAGS_infer_model + "/resnext50";
-  compare(model_dir, /* use_tensorrt */ true);
-}
-
-TEST(AnalysisPredictor, use_gpu) {
-  std::string model_dir = FLAGS_infer_model + "/" + "mobilenet";
-  AnalysisConfig config;
-  config.EnableUseGpu(100, 0);
-  config.SetModel(model_dir);
-  config.pass_builder()->TurnOnDebug();
-
-  std::vector<std::vector<PaddleTensor>> inputs_all;
-  auto predictor = CreatePaddlePredictor(config);
-  SetFakeImageInput(&inputs_all, model_dir, false, "__model__", "");
-
-  std::vector<PaddleTensor> outputs;
-  for (auto& input : inputs_all) {
-    ASSERT_TRUE(predictor->Run(input, &outputs));
-  }
-}
-
 }  // namespace inference
 }  // namespace paddle