From 09016df8df61cff85a58c0dfd5a29e4feb575a97 Mon Sep 17 00:00:00 2001
From: tensor-tang
Date: Wed, 5 Sep 2018 21:03:53 +0800
Subject: [PATCH] make analyzer run

---
 paddle/fluid/inference/analysis/CMakeLists.txt    | 14 +-------------
 .../inference/analysis/analyzer_lac_tester.cc     | 10 ++++++++--
 2 files changed, 9 insertions(+), 15 deletions(-)

diff --git a/paddle/fluid/inference/analysis/CMakeLists.txt b/paddle/fluid/inference/analysis/CMakeLists.txt
index 43201fb0bb..dce74ee3f9 100644
--- a/paddle/fluid/inference/analysis/CMakeLists.txt
+++ b/paddle/fluid/inference/analysis/CMakeLists.txt
@@ -93,19 +93,7 @@ if (NOT EXISTS ${LAC_INSTALL_DIR} AND WITH_TESTING AND WITH_INFERENCE)
 endif()

 inference_analysis_test(test_analyzer_lac SRCS analyzer_lac_tester.cc
-  EXTRA_DEPS paddle_inference_api paddle_fluid_api ir_pass_manager analysis
-  analysis_predictor
-  # ir
-  fc_fuse_pass
-  fc_lstm_fuse_pass
-  seq_concat_fc_fuse_pass
-  graph_viz_pass
-  infer_clean_graph_pass
-  graph_pattern_detector
-  infer_clean_graph_pass
-  attention_lstm_fuse_pass
-  paddle_inference_api
-  pass
+  EXTRA_DEPS paddle_inference_api paddle_fluid_api ir_pass_manager analysis_predictor
   ARGS --infer_model=${LAC_INSTALL_DIR}/model
        --infer_data=${LAC_INSTALL_DIR}/data.txt)

diff --git a/paddle/fluid/inference/analysis/analyzer_lac_tester.cc b/paddle/fluid/inference/analysis/analyzer_lac_tester.cc
index 2aef25603f..5efee95030 100644
--- a/paddle/fluid/inference/analysis/analyzer_lac_tester.cc
+++ b/paddle/fluid/inference/analysis/analyzer_lac_tester.cc
@@ -16,6 +16,7 @@
 #include <gtest/gtest.h>
 #include "paddle/fluid/framework/ir/pass.h"
 #include "paddle/fluid/inference/analysis/ut_helper.h"
+#include "paddle/fluid/inference/api/analysis_predictor.h"
 #include "paddle/fluid/inference/api/helper.h"
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #include "paddle/fluid/platform/profiler.h"
@@ -170,9 +171,14 @@ void TestLACPrediction(const std::string &model_path,
   GetOneBatch(&input_slots, &data, batch_size);
   std::unique_ptr<PaddlePredictor> predictor;
   if (use_analysis) {
+    AnalysisConfig cfg;
+    cfg.model_dir = model_path;
+    cfg.use_gpu = false;
+    cfg.device = 0;
+    cfg.specify_input_name = true;
+    cfg.enable_ir_optim = true;
     predictor =
-        CreatePaddlePredictor<NativeConfig, PaddleEngineKind::kAnalysis>(
-            config);
+        CreatePaddlePredictor<AnalysisConfig, PaddleEngineKind::kAnalysis>(cfg);
   } else {
     predictor =
         CreatePaddlePredictor<NativeConfig, PaddleEngineKind::kNative>(config);
--
GitLab
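
For reference, below is a minimal C++ sketch of the analysis-engine setup this patch switches the tester to. Only the AnalysisConfig fields are taken from the diff above; the helper name RunWithAnalysis, the enclosing namespace, the exact CreatePaddlePredictor template arguments, and the Run() call are assumptions based on the contemporaneous Paddle C++ inference API, not lines from this patch.

#include <string>
#include <vector>

#include "paddle/fluid/inference/api/analysis_predictor.h"
#include "paddle/fluid/inference/api/paddle_inference_api.h"

namespace paddle {  // assumed: AnalysisConfig is visible here via the headers above

// Hypothetical helper: builds the analysis-path predictor the same way the
// patched tester does and runs a single batch of already-prepared tensors.
std::vector<PaddleTensor> RunWithAnalysis(
    const std::string &model_dir, const std::vector<PaddleTensor> &inputs) {
  AnalysisConfig cfg;
  cfg.model_dir = model_dir;      // path to the saved inference model directory
  cfg.use_gpu = false;            // the LAC test runs on CPU
  cfg.device = 0;
  cfg.specify_input_name = true;  // feed tensors by name rather than by position
  cfg.enable_ir_optim = true;     // let the analysis IR passes rewrite the graph

  auto predictor =
      CreatePaddlePredictor<AnalysisConfig, PaddleEngineKind::kAnalysis>(cfg);

  std::vector<PaddleTensor> outputs;
  predictor->Run(inputs, &outputs);  // one forward pass over the batch
  return outputs;
}

}  // namespace paddle

With the predictor created through analysis_predictor, the IR-optimization passes are pulled in transitively, which is presumably why the long per-pass EXTRA_DEPS list in CMakeLists.txt could be dropped.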