diff --git a/paddle/fluid/inference/api/analysis_config.cc b/paddle/fluid/inference/api/analysis_config.cc
index bf541b4d1b0a0aacb822a949b2478a1c306511d3..2beedaa2f4490610f602a39356dccddc232cb4dd 100644
--- a/paddle/fluid/inference/api/analysis_config.cc
+++ b/paddle/fluid/inference/api/analysis_config.cc
@@ -16,6 +16,7 @@
 #include
 #include
 
+#include "paddle/fluid/inference/api/helper.h"
 #include "paddle/fluid/inference/api/paddle_analysis_config.h"
 #include "paddle/fluid/inference/api/paddle_pass_builder.h"
 #include "paddle/fluid/inference/utils/table_printer.h"
@@ -40,6 +41,12 @@ extern const std::vector<std::string> kTRTSubgraphPasses;
 extern const std::vector<std::string> kDlnneSubgraphPasses;
 extern const std::vector<std::string> kLiteSubgraphPasses;
 
+AnalysisConfig::AnalysisConfig() {
+  // NOTE(liuyuanle): Why put the following code here?
+  // ref to https://github.com/PaddlePaddle/Paddle/pull/50864
+  inference::InitGflagsFromEnv();
+}
+
 PassStrategy *AnalysisConfig::pass_builder() const {
   if (!pass_builder_.get()) {
     if (use_gpu_) {
diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
index e8888940a99ac939cae45757ebe26c961b21949b..7b12567a33f97e303c3fa9ff685183b31790e07b 100644
--- a/paddle/fluid/inference/api/analysis_predictor.cc
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
@@ -1497,32 +1497,6 @@ CreatePaddlePredictor(
     if (std::getenv("FLAGS_initial_cpu_memory_in_mb") == nullptr) {
       SetGflag("initial_cpu_memory_in_mb", "0");
     }
-
-    // support set gflags from environment.
-    std::vector<std::string> gflags;
-    const phi::ExportedFlagInfoMap &env_map = phi::GetExportedFlagInfoMap();
-    std::ostringstream os;
-    for (auto &pair : env_map) {
-      os << pair.second.name << ",";
-    }
-    std::string tryfromenv_str = os.str();
-    if (!tryfromenv_str.empty()) {
-      tryfromenv_str.pop_back();
-      tryfromenv_str = "--tryfromenv=" + tryfromenv_str;
-      gflags.push_back(tryfromenv_str);
-    }
-    if (framework::InitGflags(gflags)) {
-      VLOG(3)
-          << "The following gpu analysis configurations only take effect "
-             "for the first predictor: ";
-      for (const auto &gflag : gflags) {
-        VLOG(3) << gflag;
-      }
-    } else {
-      LOG(WARNING) << "The one-time configuration of analysis predictor "
-                      "failed, which may be due to native predictor called "
-                      "first and its configurations taken effect.";
-    }
   });
 
   if (config.thread_local_stream_enabled() &&
diff --git a/paddle/fluid/inference/api/helper.cc b/paddle/fluid/inference/api/helper.cc
index 3454c5c8fd17b5221c4ace93d56a88fc574526d1..3fd8ed490fe458b40ad00f739c802ee87a688828 100644
--- a/paddle/fluid/inference/api/helper.cc
+++ b/paddle/fluid/inference/api/helper.cc
@@ -16,6 +16,7 @@
 
 #include "paddle/fluid/framework/custom_operator.h"
 #include "paddle/fluid/framework/operator.h"
+#include "paddle/fluid/platform/init.h"
 #include "paddle/phi/api/ext/op_meta_info.h"
 
 namespace paddle {
@@ -59,5 +60,22 @@ void RegisterAllCustomOperator() {
   }
 }
 
+void InitGflagsFromEnv() {
+  // support set gflags from environment.
+  std::vector<std::string> gflags;
+  const phi::ExportedFlagInfoMap &env_map = phi::GetExportedFlagInfoMap();
+  std::ostringstream os;
+  for (auto &pair : env_map) {
+    os << pair.second.name << ",";
+  }
+  std::string tryfromenv_str = os.str();
+  if (!tryfromenv_str.empty()) {
+    tryfromenv_str.pop_back();
+    tryfromenv_str = "--tryfromenv=" + tryfromenv_str;
+    gflags.push_back(tryfromenv_str);
+  }
+  framework::InitGflags(gflags);
+}
+
 }  // namespace inference
 }  // namespace paddle
diff --git a/paddle/fluid/inference/api/helper.h b/paddle/fluid/inference/api/helper.h
index de92281bb07a747860652e2167c176036871244f..92b6f12338f79742a42df3c04290a5c443243c70 100644
--- a/paddle/fluid/inference/api/helper.h
+++ b/paddle/fluid/inference/api/helper.h
@@ -432,6 +432,8 @@ static bool IsFileExists(const std::string &path) {
 
 void RegisterAllCustomOperator();
 
+void InitGflagsFromEnv();
+
 static inline double ToMegaBytes(size_t bytes) {
   return static_cast<double>(bytes) / (1 << 20);
 }
diff --git a/paddle/fluid/inference/api/paddle_analysis_config.h b/paddle/fluid/inference/api/paddle_analysis_config.h
index 8e12f15e7a01819c7364238c85760d897a800eb3..c1a18694fe1206ad8020dd0dfdb686a20940784d 100644
--- a/paddle/fluid/inference/api/paddle_analysis_config.h
+++ b/paddle/fluid/inference/api/paddle_analysis_config.h
@@ -138,7 +138,7 @@ struct DistConfig {
 /// and loading it into AnalysisPredictor.
 ///
 struct PD_INFER_DECL AnalysisConfig {
-  AnalysisConfig() = default;
+  AnalysisConfig();
   ///
   /// \brief Construct a new AnalysisConfig from another
   /// AnalysisConfig.
diff --git a/paddle/fluid/platform/init.cc b/paddle/fluid/platform/init.cc
index b53a65e7e61f3aadce703c6158388ce7c0101e6c..8410ea0b2192a7d3e97b65087ec46a1082d2a755 100644
--- a/paddle/fluid/platform/init.cc
+++ b/paddle/fluid/platform/init.cc
@@ -85,7 +85,6 @@ namespace framework {
 
 std::once_flag gflags_init_flag;
 std::once_flag glog_init_flag;
-std::once_flag npu_init_flag;
 std::once_flag memory_method_init_flag;
 
 bool InitGflags(std::vector<std::string> args) {
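
The diff above moves the environment-driven gflags setup out of CreatePaddlePredictor and into a new inference::InitGflagsFromEnv() helper that the AnalysisConfig default constructor now calls, so exported FLAGS_* environment variables take effect as soon as the first config object is built. The following minimal sketch (not part of the diff) illustrates that behavior; the flag value, model path, and main() wrapper are illustrative assumptions only.

// Usage sketch, assuming this change is in place: exported FLAGS_* variables
// are parsed via --tryfromenv when the first AnalysisConfig is constructed,
// rather than inside CreatePaddlePredictor(). Flag value and model path below
// are illustrative.
#include <cstdlib>

#include "paddle/fluid/inference/api/paddle_analysis_config.h"

int main() {
  // Export a gflag through the environment before creating any config.
  setenv("FLAGS_initial_cpu_memory_in_mb", "100", /*overwrite=*/1);

  // The default constructor now calls inference::InitGflagsFromEnv(), so the
  // flag above is picked up here, even if no predictor is ever created.
  paddle::AnalysisConfig config;
  config.SetModel("./model_dir");  // hypothetical model directory
  return 0;
}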