Commit 6de9ebc6 authored by dongdaxiang

refine VLOG in fleet_wrapper.h

test=develop
Parent 97d5cd30
@@ -42,13 +42,13 @@ std::shared_ptr<paddle::distributed::PSlib> FleetWrapper::pslib_ptr_ = NULL;
 void FleetWrapper::InitServer(const std::string& dist_desc, int index) {
 #ifdef PADDLE_WITH_PSLIB
   if (!is_initialized_) {
-    LOG(WARNING) << "Going to init server";
+    VLOG(3) << "Going to init server";
     pslib_ptr_ = std::shared_ptr<paddle::distributed::PSlib>(
         new paddle::distributed::PSlib());
     pslib_ptr_->init_server(dist_desc, index);
     is_initialized_ = true;
   } else {
-    LOG(WARNING) << "Server can be initialized only once";
+    VLOG(3) << "Server can be initialized only once";
   }
 #endif
 }
@@ -58,7 +58,7 @@ void FleetWrapper::InitWorker(const std::string& dist_desc,
                               int node_num, int index) {
 #ifdef PADDLE_WITH_PSLIB
   if (!is_initialized_) {
-    LOG(WARNING) << "Going to init server";
+    VLOG(3) << "Going to init worker";
     pslib_ptr_ = std::shared_ptr<paddle::distributed::PSlib>(
         new paddle::distributed::PSlib());
     pslib_ptr_->init_worker(dist_desc,
@@ -66,21 +66,21 @@ void FleetWrapper::InitWorker(const std::string& dist_desc,
                             node_num, index);
     is_initialized_ = true;
   } else {
-    LOG(WARNING) << "Worker can be initialized only once";
+    VLOG(3) << "Worker can be initialized only once";
   }
 #endif
 }

 void FleetWrapper::StopServer() {
 #ifdef PADDLE_WITH_PSLIB
-  LOG(WARNING) << "Going to stop server";
+  VLOG(3) << "Going to stop server";
   pslib_ptr_->stop_server();
 #endif
 }

 uint64_t FleetWrapper::RunServer() {
 #ifdef PADDLE_WITH_PSLIB
-  LOG(WARNING) << "Going to run server";
+  VLOG(3) << "Going to run server";
   return pslib_ptr_->run_server();
 #else
   return 0;
@@ -90,7 +90,7 @@ uint64_t FleetWrapper::RunServer() {
 void FleetWrapper::GatherServers(const std::vector<uint64_t>& host_sign_list,
                                  int node_num) {
 #ifdef PADDLE_WITH_PSLIB
-  LOG(WARNING) << "Going to gather server ips";
+  VLOG(3) << "Going to gather server ips";
   pslib_ptr_->gather_servers(const_cast<uint64_t*>(host_sign_list.data()),
                              node_num);
 #endif
......
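The substance of the hunks above is a logging downgrade: with glog, which provides Paddle's LOG and VLOG macros, LOG(WARNING) is emitted unconditionally, while VLOG(3) appears only when the process runs at verbosity level 3 or higher, so these routine init/stop notices stay out of default output. A minimal standalone sketch of that behavior, assuming glog is available (the messages are illustrative):

#include <glog/logging.h>

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  // Emitted unconditionally, at WARNING severity.
  LOG(WARNING) << "always visible";
  // Emitted only when verbosity >= 3, e.g. run with GLOG_v=3,
  // or pass --v=3 if gflags command-line parsing is wired in.
  VLOG(3) << "visible only at verbosity 3+";
  // VLOG_IS_ON guards expensive debug-only work the same way.
  if (VLOG_IS_ON(3)) {
    LOG(INFO) << "verbose diagnostics enabled";
  }
  return 0;
}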
@@ -39,6 +39,7 @@ void MultiTrainer::Initialize(const TrainerDesc& trainer_desc) {
   for (unsigned i = 0; i < trainer_desc.filelist_size(); ++i) {
     filelist_vec.push_back(trainer_desc.filelist(i));
   }
+  readers_[0]->SetFileList(filelist_vec);
 }

 // call only after all resources are set in current trainer
......
@@ -29,7 +29,9 @@ class TrainerDesc(object):
             text_format.Parse(f.read(), self.proto_desc)
         '''
         self.proto_desc = trainer_desc_pb2.TrainerDesc()
-        self.proto_desc.thread_num = 12
+        import multiprocessing as mp
+        # set default thread num == cpu count
+        self.proto_desc.thread_num = mp.cpu_count()

     def set_thread(self, thread_num):
         self.proto_desc.thread_num = thread_num
......
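The Python hunk swaps a hard-coded default of 12 threads for the machine's logical CPU count via multiprocessing.cpu_count(), while set_thread still allows an explicit override. As an aside, the analogous default-with-override pattern on the C++ side would look roughly like the sketch below (not part of this commit; note that std::thread::hardware_concurrency() may return 0 when the count cannot be determined, hence the fallback):

#include <thread>

// Sketch only: derive a default thread count at runtime instead of
// hard-coding one; callers can still override it explicitly.
int DefaultThreadNum() {
  unsigned n = std::thread::hardware_concurrency();
  return n > 0 ? static_cast<int>(n) : 1;  // 0 means "unknown"
}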