From e7d83389e61fdbfbf5f16db3fc7dd972b7589bd5 Mon Sep 17 00:00:00 2001
From: nhzlx
Date: Wed, 9 Jan 2019 12:53:59 +0000
Subject: [PATCH] fix demo ci bug

1. fix the trt_demo bug
2. trigger an exit when a demo fails

test=develop
---
 paddle/fluid/inference/api/demo_ci/run.sh                | 4 ++++
 paddle/fluid/inference/api/demo_ci/trt_mobilenet_demo.cc | 4 ++--
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/paddle/fluid/inference/api/demo_ci/run.sh b/paddle/fluid/inference/api/demo_ci/run.sh
index a94ccfa924..9811fe2cd0 100755
--- a/paddle/fluid/inference/api/demo_ci/run.sh
+++ b/paddle/fluid/inference/api/demo_ci/run.sh
@@ -116,6 +116,10 @@ D
       --modeldir=$DATA_DIR/mobilenet/model \
       --data=$DATA_DIR/mobilenet/data.txt \
       --refer=$DATA_DIR/mobilenet/result.txt
+    if [ $? -ne 0 ]; then
+      echo "trt demo trt_mobilenet_demo runs fail."
+      exit 1
+    fi
   fi
 done
 set +x
diff --git a/paddle/fluid/inference/api/demo_ci/trt_mobilenet_demo.cc b/paddle/fluid/inference/api/demo_ci/trt_mobilenet_demo.cc
index 30215e480f..338a0cec16 100644
--- a/paddle/fluid/inference/api/demo_ci/trt_mobilenet_demo.cc
+++ b/paddle/fluid/inference/api/demo_ci/trt_mobilenet_demo.cc
@@ -38,8 +38,8 @@ void Main() {
   std::unique_ptr<PaddlePredictor> predictor;
   paddle::contrib::AnalysisConfig config;
   config.EnableUseGpu(100, 0);
-  config.SetModel(FLAGS_modeldir + "/__params__",
-                  FLAGS_modeldir + "/__model__");
+  config.SetModel(FLAGS_modeldir + "/__model__",
+                  FLAGS_modeldir + "/__params__");
   config.EnableTensorRtEngine();
   predictor = CreatePaddlePredictor(config);
--
GitLab
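
Note on the trt_mobilenet_demo.cc hunk: for a combined model, AnalysisConfig::SetModel expects the serialized program file ("__model__") as its first argument and the merged parameters file ("__params__") as its second, and the demo had the two swapped. The sketch below restates the corrected call order outside the demo code. It is illustrative only, assuming the Paddle inference API of this era; the include path, the MakeTrtPredictor helper name, and the model_dir argument are assumptions and not part of this patch.

    // Sketch, not part of the patch: shows the argument order the fix restores.
    #include <memory>
    #include <string>

    #include "paddle/fluid/inference/api/paddle_inference_api.h"  // assumed in-tree header path

    // Hypothetical helper: builds a TensorRT-enabled predictor for a combined-model
    // directory containing "__model__" (program) and "__params__" (weights).
    std::unique_ptr<paddle::PaddlePredictor> MakeTrtPredictor(const std::string &model_dir) {
      paddle::contrib::AnalysisConfig config;
      config.EnableUseGpu(100, 0);  // 100 MB initial GPU memory pool on device 0
      // Program file first, parameters file second: the order this patch restores.
      config.SetModel(model_dir + "/__model__", model_dir + "/__params__");
      config.EnableTensorRtEngine();
      return paddle::CreatePaddlePredictor(config);
    }

The run.sh hunk complements this: because the demo script previously ignored the demo's exit status, a broken demo binary would not fail CI, so the patch checks $? right after the run and exits non-zero on failure.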