From 1078e0648fd1fc606595845f7a237403c739763e Mon Sep 17 00:00:00 2001
From: zmxdream
Date: Thu, 29 Dec 2022 11:52:45 +0800
Subject: [PATCH] [pglbox2.0]fix load into memory (#49389)

* fix load into memory

* fix load into memory

* fix code style
---
 paddle/fluid/framework/fleet/ps_gpu_wrapper.cc | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/paddle/fluid/framework/fleet/ps_gpu_wrapper.cc b/paddle/fluid/framework/fleet/ps_gpu_wrapper.cc
index 87704f07790..4b7034aa5c4 100644
--- a/paddle/fluid/framework/fleet/ps_gpu_wrapper.cc
+++ b/paddle/fluid/framework/fleet/ps_gpu_wrapper.cc
@@ -144,7 +144,7 @@ void PSGPUWrapper::add_key_to_local(const std::vector<uint64_t>& vec_data) {
          iter++) {
       uint64_t cur_key = *iter;
       int shard_id = cur_key % thread_keys_shard_num_;
-      // TODO: feasign <-> slot <-> multi_dim
+      // TODO(lxsbupt): feasign <-> slot <-> multi_dim
       this->thread_dim_keys_[i][shard_id][0].insert(cur_key);
     }
   };
@@ -1304,6 +1304,7 @@ void PSGPUWrapper::LoadIntoMemory(bool is_shuffle) {
   }
 
   InitSlotInfo();
+#if defined(PADDLE_WITH_GPU_GRAPH) && defined(PADDLE_WITH_HETERPS)
   if (FLAGS_gpugraph_storage_mode != GpuGraphStorageMode::WHOLE_HBM) {
     std::shared_ptr<HeterContext> gpu_task = gpu_task_pool_.Get();
     gpu_task->Reset();
@@ -1312,7 +1313,12 @@ void PSGPUWrapper::LoadIntoMemory(bool is_shuffle) {
   } else if (hbm_sparse_table_initialized_ == false) {
     SparseTableToHbm();
   }
-
+#else
+  std::shared_ptr<HeterContext> gpu_task = gpu_task_pool_.Get();
+  gpu_task->Reset();
+  gpu_task->pass_id_ = (uint16_t)(dataset_->GetPassID());
+  data_ready_channel_->Put(gpu_task);
+#endif
   VLOG(3) << "End LoadIntoMemory(), dataset[" << dataset_ << "]";
 }
 
@@ -1544,7 +1550,7 @@ void PSGPUWrapper::HbmToSparseTable() {
         float* gpu_val = reinterpret_cast<float*>(test_build_values + local_offset);
 #ifdef PADDLE_WITH_PSLIB
-        // TODO: PSLIB DumpFill
+        // TODO(lxsbupt): PSLIB DumpFill
 #endif
 #ifdef PADDLE_WITH_PSCORE
         accessor_wrapper_ptr->DumpFill(gpu_val, cpu_table_accessor_, mf_dim);
 #endif
--
GitLab