diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
index 649118e80c4b6547f048a5a3902dbdd1ff9f9d06..b23d0841c9a5f96348edaff86db88be6e35ffe81 100644
--- a/paddle/fluid/inference/api/analysis_predictor.cc
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
@@ -478,6 +478,10 @@ void AnalysisPredictor::OptimizeInferenceProgram() {
 template <>
 std::unique_ptr<PaddlePredictor> CreatePaddlePredictor<
     AnalysisConfig, PaddleEngineKind::kAnalysis>(const AnalysisConfig &config) {
+  if (config.glog_info_disabled()) {
+    FLAGS_logtostderr = 1;
+    FLAGS_minloglevel = 2;  // GLOG_ERROR
+  }
   VLOG(3) << "create AnalysisConfig";
   PADDLE_ENFORCE(config.is_valid(),
                  "Note: Each config can only be used for one predictor.");
@@ -509,11 +513,6 @@ std::unique_ptr<PaddlePredictor> CreatePaddlePredictor<
       framework::InitGflags(flags);
     }
   }
-  framework::InitGLOG("");
-  if (config.glog_info_disabled()) {
-    FLAGS_logtostderr = 1;
-    FLAGS_minloglevel = 2;  // GLOG_ERROR
-  }

   std::unique_ptr<PaddlePredictor> predictor(new AnalysisPredictor(config));
   // Each config can only be used for one predictor.
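
For context, this change hoists the glog-silencing flags to the very top of `CreatePaddlePredictor`, so they take effect before the first `VLOG` in that function (and before any gflags/glog setup), instead of being applied after it. A minimal caller-side sketch of the path this affects, assuming the standard C++ inference API where `AnalysisConfig::DisableGlogInfo()` is the setter behind `glog_info_disabled()`; the header name and model directory below are placeholders:

```cpp
#include "paddle_inference_api.h"  // header name assumed; adjust to your install layout

int main() {
  paddle::AnalysisConfig config;
  config.SetModel("./model_dir");  // placeholder model directory

  // Request that the predictor suppress glog INFO/WARNING output; with this
  // patch the corresponding FLAGS_* are set before CreatePaddlePredictor
  // emits its first log line.
  config.DisableGlogInfo();

  auto predictor = paddle::CreatePaddlePredictor<paddle::AnalysisConfig>(config);
  return 0;
}
```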