From b854d959a543ee83e89a77d0627fb375bf0f9ba1 Mon Sep 17 00:00:00 2001
From: Tao Luo
Date: Wed, 17 Oct 2018 15:58:37 +0800
Subject: [PATCH] update with comments

---
 .../fluid/inference/tests/api/analyzer_resnet50_tester.cc | 8 +++++---
 paddle/fluid/inference/tests/api/analyzer_vis_tester.cc   | 8 +++++---
 paddle/fluid/inference/tests/api/tester_helper.h          | 2 +-
 3 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc b/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
index 050f267fff..92cc76d3ce 100644
--- a/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
@@ -20,7 +20,7 @@ namespace paddle {
 namespace inference {
 namespace analysis {
 
-void SetConfig(AnalysisConfig *cfg, bool _use_mkldnn = FLAGS__use_mkldnn) {
+void SetConfig(AnalysisConfig *cfg) {
   cfg->param_file = FLAGS_infer_model + "/params";
   cfg->prog_file = FLAGS_infer_model + "/model";
   cfg->use_gpu = false;
@@ -28,7 +28,7 @@ void SetConfig(AnalysisConfig *cfg, bool _use_mkldnn = FLAGS__use_mkldnn) {
   cfg->enable_ir_optim = true;
   cfg->specify_input_name = true;
 #ifdef PADDLE_WITH_MKLDNN
-  cfg->_use_mkldnn = _use_mkldnn;
+  cfg->_use_mkldnn = FLAGS_use_MKLDNN;
 #endif
 }
 
@@ -96,9 +96,11 @@ TEST(Analyzer_resnet50, compare) {
   // since default config._use_mkldnn=true in this case,
   // we should compare analysis_outputs in config._use_mkldnn=false
   // with native_outputs as well.
+  FLAGS_use_MKLDNN = false;
   AnalysisConfig cfg1;
-  SetConfig(&cfg1, false);
+  SetConfig(&cfg1);
   CompareNativeAndAnalysis(cfg1, input_slots_all);
+  FLAGS_use_MKLDNN = true;
 #endif
 }
 
diff --git a/paddle/fluid/inference/tests/api/analyzer_vis_tester.cc b/paddle/fluid/inference/tests/api/analyzer_vis_tester.cc
index 07398ed26c..96a3c6ff24 100644
--- a/paddle/fluid/inference/tests/api/analyzer_vis_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_vis_tester.cc
@@ -50,7 +50,7 @@ Record ProcessALine(const std::string &line) {
   return record;
 }
 
-void SetConfig(AnalysisConfig *cfg, bool _use_mkldnn = FLAGS__use_mkldnn) {
+void SetConfig(AnalysisConfig *cfg) {
   cfg->param_file = FLAGS_infer_model + "/__params__";
   cfg->prog_file = FLAGS_infer_model + "/__model__";
   cfg->use_gpu = false;
@@ -60,7 +60,7 @@ void SetConfig(AnalysisConfig *cfg, bool _use_mkldnn = FLAGS__use_mkldnn) {
   // TODO(TJ): fix fusion gru
   cfg->ir_passes.push_back("fc_gru_fuse_pass");
 #ifdef PADDLE_WITH_MKLDNN
-  cfg->_use_mkldnn = _use_mkldnn;
+  cfg->_use_mkldnn = FLAGS_use_MKLDNN;
 #endif
 }
 
@@ -129,9 +129,11 @@ TEST(Analyzer_vis, compare) {
   // since default config._use_mkldnn=true in this case,
   // we should compare analysis_outputs in config._use_mkldnn=false
   // with native_outputs as well.
+  FLAGS_use_MKLDNN = false;
   AnalysisConfig cfg1;
-  SetConfig(&cfg1, false);
+  SetConfig(&cfg1);
   CompareNativeAndAnalysis(cfg1, input_slots_all);
+  FLAGS_use_MKLDNN = true;
 #endif
 }
 
diff --git a/paddle/fluid/inference/tests/api/tester_helper.h b/paddle/fluid/inference/tests/api/tester_helper.h
index fe3ee5bcd7..df9d017567 100644
--- a/paddle/fluid/inference/tests/api/tester_helper.h
+++ b/paddle/fluid/inference/tests/api/tester_helper.h
@@ -35,7 +35,7 @@ DEFINE_bool(test_all_data, false, "Test the all dataset in data file.");
 DEFINE_int32(num_threads, 1, "Running the inference program in multi-threads.");
 DEFINE_bool(use_analysis, true,
             "Running the inference program in analysis mode.");
-DEFINE_bool(_use_mkldnn, true,
+DEFINE_bool(use_MKLDNN, true,
             "Running the inference program with mkldnn library.");
 
 namespace paddle {
-- 
GitLab
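
Note on the pattern the patch relies on: DEFINE_bool(use_MKLDNN, ...) in tester_helper.h
generates a global FLAGS_use_MKLDNN variable, so the tests can flip the flag around a
comparison at run time instead of threading a bool parameter through SetConfig(). Below is
a minimal, standalone sketch of that gflags idiom, not code from this patch; RunComparison()
is a hypothetical stand-in for CompareNativeAndAnalysis(), and the gflags:: namespace is
assumed (older gflags releases expose google:: instead).

#include <gflags/gflags.h>
#include <iostream>

// Generates the global bool FLAGS_use_MKLDNN, defaulting to true.
DEFINE_bool(use_MKLDNN, true,
            "Running the inference program with mkldnn library.");

// Hypothetical stand-in for CompareNativeAndAnalysis(); a real SetConfig()
// would read FLAGS_use_MKLDNN here instead of taking a bool parameter.
void RunComparison() {
  std::cout << "use_MKLDNN = " << std::boolalpha << FLAGS_use_MKLDNN << "\n";
}

int main(int argc, char **argv) {
  gflags::ParseCommandLineFlags(&argc, &argv, true);

  RunComparison();           // first comparison: flag keeps its default (true)

  FLAGS_use_MKLDNN = false;  // temporarily disable MKL-DNN for a second comparison
  RunComparison();
  FLAGS_use_MKLDNN = true;   // restore the default so later tests are unaffected

  return 0;
}

Restoring the flag at the end mirrors the patch: the toggle stays local to the compare test,
so any test that runs afterwards still sees the default value.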