diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
index 4e8d2094e2a53fb544c6904dd6922fe4805b86db..6319d99ab81100bdb36240bdb13112dad594f705 100644
--- a/paddle/fluid/inference/api/analysis_predictor.cc
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
@@ -477,6 +477,10 @@ void AnalysisPredictor::OptimizeInferenceProgram() {
 template <>
 std::unique_ptr<PaddlePredictor> CreatePaddlePredictor<
     AnalysisConfig, PaddleEngineKind::kAnalysis>(const AnalysisConfig &config) {
+  if (config.glog_info_disabled()) {
+    FLAGS_logtostderr = 1;
+    FLAGS_minloglevel = 2;  // GLOG_ERROR
+  }
   VLOG(3) << "create AnalysisConfig";
   PADDLE_ENFORCE(config.is_valid(),
                  "Note: Each config can only be used for one predictor.");
@@ -508,11 +512,6 @@ std::unique_ptr<PaddlePredictor> CreatePaddlePredictor<
       framework::InitGflags(flags);
     }
   }
-  framework::InitGLOG("");
-  if (config.glog_info_disabled()) {
-    FLAGS_logtostderr = 1;
-    FLAGS_minloglevel = 2;  // GLOG_ERROR
-  }
 
   std::unique_ptr<PaddlePredictor> predictor(new AnalysisPredictor(config));
   // Each config can only be used for one predictor.
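
Note: the change moves the glog flag setup to the very first statements of CreatePaddlePredictor, so FLAGS_logtostderr and FLAGS_minloglevel take effect before VLOG/PADDLE_ENFORCE or InitGflags can emit anything, and it drops the now-redundant framework::InitGLOG("") call. A minimal caller-side sketch of the path this affects, assuming AnalysisConfig::DisableGlogInfo() is the public setter behind glog_info_disabled() and using an illustrative include path and model directory:

    #include "paddle_inference_api.h"  // public inference API header (path may differ per install)

    int main() {
      paddle::AnalysisConfig config;
      config.SetModel("/path/to/model_dir");  // illustrative model directory
      config.DisableGlogInfo();               // makes glog_info_disabled() return true
      // With this patch applied, INFO/WARNING glog output is suppressed before
      // anything inside CreatePaddlePredictor has a chance to log.
      auto predictor = paddle::CreatePaddlePredictor(config);
      return predictor != nullptr ? 0 : 1;
    }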