提交 1965a224 编写于 作者: R root 提交者: nhzlx

Reduce TensorRT CI running time.

test=develop
上级 a67fbffd
...@@ -116,7 +116,7 @@ void compare_continuous_input(std::string model_dir, bool use_tensorrt) { ...@@ -116,7 +116,7 @@ void compare_continuous_input(std::string model_dir, bool use_tensorrt) {
reinterpret_cast<const PaddlePredictor::Config*>(&analysis_config); reinterpret_cast<const PaddlePredictor::Config*>(&analysis_config);
auto native_pred = CreateTestPredictor(config, false); auto native_pred = CreateTestPredictor(config, false);
auto analysis_pred = CreateTestPredictor(config, true); auto analysis_pred = CreateTestPredictor(config, true);
for (int i = 0; i < 100; i++) { for (int i = 0; i < 20; i++) {
std::vector<std::vector<PaddleTensor>> inputs_all; std::vector<std::vector<PaddleTensor>> inputs_all;
if (!FLAGS_prog_filename.empty() && !FLAGS_param_filename.empty()) { if (!FLAGS_prog_filename.empty() && !FLAGS_param_filename.empty()) {
SetFakeImageInput(&inputs_all, model_dir, true, FLAGS_prog_filename, SetFakeImageInput(&inputs_all, model_dir, true, FLAGS_prog_filename,
...@@ -133,11 +133,13 @@ void compare_continuous_input(std::string model_dir, bool use_tensorrt) { ...@@ -133,11 +133,13 @@ void compare_continuous_input(std::string model_dir, bool use_tensorrt) {
TEST(TensorRT_mobilenet, compare) { TEST(TensorRT_mobilenet, compare) {
std::string model_dir = FLAGS_infer_model + "/mobilenet"; std::string model_dir = FLAGS_infer_model + "/mobilenet";
compare(model_dir, /* use_tensorrt */ true); compare(model_dir, /* use_tensorrt */ true);
// Enable this when needed.
// profile(model_dir, /* use_analysis */ true, FLAGS_use_tensorrt);
} }
TEST(TensorRT_resnet50, compare) { TEST(resnet50, compare_continuous_input) {
std::string model_dir = FLAGS_infer_model + "/resnet50"; std::string model_dir = FLAGS_infer_model + "/resnet50";
compare(model_dir, /* use_tensorrt */ true); compare_continuous_input(model_dir, true);
} }
TEST(TensorRT_resnext50, compare) { TEST(TensorRT_resnext50, compare) {
...@@ -145,24 +147,6 @@ TEST(TensorRT_resnext50, compare) { ...@@ -145,24 +147,6 @@ TEST(TensorRT_resnext50, compare) {
compare(model_dir, /* use_tensorrt */ true); compare(model_dir, /* use_tensorrt */ true);
} }
// Profile resnext50 inference with the analysis predictor; whether TensorRT is
// used is controlled by the FLAGS_use_tensorrt command-line flag.
TEST(TensorRT_resnext50, profile) {
  // Benchmark recording is off by default; set FLAGS_record_benchmark to true
  // to dump results to file.
  // FLAGS_record_benchmark=true;
  FLAGS_model_name = "resnext50";
  const std::string model_path = FLAGS_infer_model + "/resnext50";
  profile(model_path, /* use_analysis */ true, FLAGS_use_tensorrt);
}
// Check that the analysis predictor matches the native predictor on resnext50
// with TensorRT disabled.
TEST(resnext50, compare_analysis_native) {
  const std::string model_path = FLAGS_infer_model + "/resnext50";
  compare(model_path, /* use_tensorrt */ false);
}
// Compare analysis-predictor output with the native predictor on mobilenet,
// TensorRT disabled.
TEST(TensorRT_mobilenet, analysis) {
  const std::string model_path = FLAGS_infer_model + "/" + "mobilenet";
  compare(model_path, /* use_tensorrt */ false);
}
TEST(AnalysisPredictor, use_gpu) { TEST(AnalysisPredictor, use_gpu) {
std::string model_dir = FLAGS_infer_model + "/" + "mobilenet"; std::string model_dir = FLAGS_infer_model + "/" + "mobilenet";
AnalysisConfig config; AnalysisConfig config;
...@@ -180,20 +164,5 @@ TEST(AnalysisPredictor, use_gpu) { ...@@ -180,20 +164,5 @@ TEST(AnalysisPredictor, use_gpu) {
} }
} }
// Profile mobilenet with the analysis predictor enabled and TensorRT disabled.
TEST(TensorRT_mobilenet, profile) {
  const std::string model_path = FLAGS_infer_model + "/" + "mobilenet";
  profile(model_path, /* use_analysis */ true, /* use_tensorrt */ false);
}
// Feed resnet50 a stream of consecutive inputs and compare results, with
// TensorRT enabled.
TEST(resnet50, compare_continuous_input) {
  const std::string model_path = FLAGS_infer_model + "/resnet50";
  compare_continuous_input(model_path, /* use_tensorrt */ true);
}
// Same continuous-input comparison for resnet50, but without TensorRT.
TEST(resnet50, compare_continuous_input_native) {
  const std::string model_path = FLAGS_infer_model + "/resnet50";
  compare_continuous_input(model_path, /* use_tensorrt */ false);
}
} // namespace inference } // namespace inference
} // namespace paddle } // namespace paddle
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册