diff --git a/doc/fluid/design/dist_train/distributed_traing_review.md b/doc/fluid/design/dist_train/distributed_traing_review.md
index 74066a3c2bf7b5e43b18c271de3e18271ee80925..c09b7c99159ace9b3df989f803ede20bc3585d92 100644
--- a/doc/fluid/design/dist_train/distributed_traing_review.md
+++ b/doc/fluid/design/dist_train/distributed_traing_review.md
@@ -42,7 +42,3 @@ Codistillation is a technique that tries to scale the training further. A few tr
 [3] Yonghui Wu, Mike Schuster, Zhifeng Chen, Quoc V Le, Mohammad Norouzi, Wolfgang Macherey, Maxim Krikun, Yuan Cao, Qin Gao, Klaus Macherey, et al. Google’s neural machine translation system: Bridging the gap between human and machine translation.
 
 [4] LARGE SCALE DISTRIBUTED NEURAL NETWORK TRAINING THROUGH ONLINE DISTILLATION
-
-
-
-
diff --git a/paddle/fluid/operators/listen_and_serv_op.cc b/paddle/fluid/operators/listen_and_serv_op.cc
index 59b94511552874e1557d8a9d7a687af14f96c31c..318d3a2ad3f569a881f5a2f5cf579fdcbd49262b 100644
--- a/paddle/fluid/operators/listen_and_serv_op.cc
+++ b/paddle/fluid/operators/listen_and_serv_op.cc
@@ -328,9 +328,8 @@ void ListenAndServOp::RunImpl(const framework::Scope &scope,
   rpc_service_->WaitServerReady();
 
   // Write to a file of server selected port for python use.
-  std::string file_path =
-      string::Sprintf("/tmp/paddle.%d.selected_port",
-                      static_cast<int>(::getpid()));
+  std::string file_path = string::Sprintf("/tmp/paddle.%d.selected_port",
+                                          static_cast<int>(::getpid()));
   SavePort(file_path);
   if (sync_mode) {
     RunSyncLoop(&executor, program, &recv_scope, prefetch_block);
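
For context on the second hunk, here is a minimal, self-contained sketch of the behavior those lines implement: format the server's pid into a well-known path and save the selected port there so the Python side can discover it. This is an approximation, not Paddle's code: std::snprintf and std::ofstream stand in for Paddle's string::Sprintf and SavePort helpers, and the port value is a placeholder.

```cpp
#include <cstdio>
#include <fstream>
#include <string>

#include <unistd.h>  // ::getpid(); POSIX only

int main() {
  // Build "/tmp/paddle.<pid>.selected_port", mirroring the Sprintf call in
  // the patched hunk (std::snprintf used here in place of string::Sprintf).
  char buf[64];
  std::snprintf(buf, sizeof(buf), "/tmp/paddle.%d.selected_port",
                static_cast<int>(::getpid()));
  std::string file_path(buf);

  // Placeholder port; the real op would write the port the RPC service
  // actually bound to (what SavePort persists).
  int selected_port = 6174;
  std::ofstream(file_path) << selected_port;
  return 0;
}
```

The pid in the filename keeps concurrent server processes on one machine from clobbering each other's port files, which is why the path is formatted per-process rather than fixed.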