From b593d588aa418d47831a6e19653c32b4be9d3c5f Mon Sep 17 00:00:00 2001
From: Wilber
Date: Thu, 17 Dec 2020 20:34:47 +0800
Subject: [PATCH] [Inference] EnableUseGpu has higher priority than flags (#29697)

* enable_use_gpu has higher priority than FLAGS

* update.
---
 paddle/fluid/inference/api/analysis_config.cc | 15 ++++++++++++---
 1 file changed, 12 insertions(+), 3 deletions(-)

diff --git a/paddle/fluid/inference/api/analysis_config.cc b/paddle/fluid/inference/api/analysis_config.cc
index fc56cd1546c..92e1404b6ad 100644
--- a/paddle/fluid/inference/api/analysis_config.cc
+++ b/paddle/fluid/inference/api/analysis_config.cc
@@ -18,6 +18,10 @@
 #include "paddle/fluid/platform/enforce.h"
 #include "paddle/fluid/platform/gpu_info.h"
 
+#ifdef PADDLE_WITH_CUDA
+DECLARE_uint64(initial_gpu_memory_in_mb);
+#endif
+
 namespace paddle {
 
 struct MkldnnQuantizerConfig;
@@ -68,6 +72,7 @@ void AnalysisConfig::EnableUseGpu(uint64_t memory_pool_init_size_mb,
 #ifdef PADDLE_WITH_CUDA
   use_gpu_ = true;
   memory_pool_init_size_mb_ = memory_pool_init_size_mb;
+  FLAGS_initial_gpu_memory_in_mb = memory_pool_init_size_mb_;
   device_id_ = device_id;
 #else
   LOG(ERROR) << "Please compile with gpu to EnableGpu()";
@@ -482,12 +487,16 @@ float AnalysisConfig::fraction_of_gpu_memory_for_pool() const {
 #ifdef PADDLE_WITH_CUDA
   // Get the GPU memory details and calculate the fraction of memory for the
   // GPU memory pool.
-  size_t gpu_used, gpu_available;
+  size_t gpu_total, gpu_available;
   platform::SetDeviceId(device_id_);
-  platform::GpuMemoryUsage(&gpu_used, &gpu_available);
-  double total_gpu_memory = (gpu_used + gpu_available) / 1024. / 1024.;
+  platform::GpuMemoryUsage(&gpu_available, &gpu_total);
+  double total_gpu_memory = gpu_total / 1024. / 1024.;
   float fraction_of_gpu_memory =
       static_cast<float>(memory_pool_init_size_mb()) / total_gpu_memory;
+  VLOG(3) << "total_gpu_memory is " << total_gpu_memory
+          << "M, gpu_available is " << gpu_available / 1024. / 1024.
+          << "M, memory_pool_init_size is " << memory_pool_init_size_mb()
+          << "M.";
   return fraction_of_gpu_memory;
 #else
   return 0.;
-- 
GitLab
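
For context, a minimal C++ usage sketch (not part of the patch) of the behavior this change targets. It assumes the public inference header paddle_inference_api.h shipped with the Paddle inference library; the model path, 500 MB pool size, and device id 0 are placeholder values.

// Usage sketch (assumption: built against the Paddle inference library,
// which provides paddle_inference_api.h; paths and sizes are placeholders).
#include <iostream>
#include "paddle_inference_api.h"

int main() {
  paddle::AnalysisConfig config;
  config.SetModel("./model_dir");  // hypothetical model directory

  // After this patch, the 500 MB passed here is also written into
  // FLAGS_initial_gpu_memory_in_mb, so EnableUseGpu overrides any
  // --initial_gpu_memory_in_mb flag supplied at process startup.
  config.EnableUseGpu(500 /* memory_pool_init_size_mb */, 0 /* device_id */);

  // fraction_of_gpu_memory_for_pool() now divides the configured pool size
  // by the total device memory reported by platform::GpuMemoryUsage().
  std::cout << "pool size (MB): " << config.memory_pool_init_size_mb() << "\n"
            << "memory pool fraction: "
            << config.fraction_of_gpu_memory_for_pool() << std::endl;
  return 0;
}

In short, the programmatic EnableUseGpu setting now takes precedence over the GFlags value, and the reported pool fraction is computed against total device memory rather than used-plus-available memory.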