Unverified commit 39a1ab69, authored by chenxujun, committed by GitHub

Fix typos (#51338)

Parent 079f41c8
@@ -129,7 +129,7 @@ ncclComm_t ProcessGroupNCCL::NCCLComm(const Place& place) const {
iter,
place_to_comm_ctx_.end(),
phi::errors::NotFound(
"Cannot find the NCCL commmunicator in this process group."));
"Cannot find the NCCL communicator in this process group."));
return iter->second->nccl_comm();
}
......
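The ProcessGroupNCCL::NCCLComm hunk above shows the guarded-lookup idiom this error message belongs to: find the per-place communicator in a map and fail loudly if it was never created. A minimal standalone sketch of that pattern, with hypothetical names (DeviceCommCache, Comm) and a plain exception standing in for PADDLE_ENFORCE_NE:

#include <map>
#include <memory>
#include <stdexcept>
#include <string>

struct Comm {};  // hypothetical stand-in for the per-device communicator

class DeviceCommCache {  // hypothetical name, not a Paddle type
 public:
  // Look up the communicator for `place`; fail loudly if it was never
  // created, mirroring the PADDLE_ENFORCE_NE(..., NotFound(...)) above.
  std::shared_ptr<Comm> Get(const std::string& place) const {
    auto iter = place_to_comm_.find(place);
    if (iter == place_to_comm_.end()) {
      throw std::out_of_range("Cannot find the communicator for " + place);
    }
    return iter->second;
  }

 private:
  std::map<std::string, std::shared_ptr<Comm>> place_to_comm_;
};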
@@ -803,7 +803,7 @@ void EagerReducer::MarkVarReady(const size_t var_index,
"parameters participate in the backward calculation "
"again at a later time (e.g. after the forward function, "
"the loss calculation uses the unused "
"paramters of the forward and trigger backward), "
"parameters of the forward and trigger backward), "
"its gradient will be wrong.";
PADDLE_ENFORCE_EQ(has_marked_unused_vars_,
@@ -868,7 +868,7 @@ void EagerReducer::MarkVarReady(const size_t var_index,
"parameters without generating gradients during training. "
"For example, if is_sparese=True is used in Embedding, "
"the current step of this parameter cannot generate gradient "
"because of stop_gradient/detatch, where error will occur.",
"because of stop_gradient/detach, where error will occur.",
var_index,
tensors_[var_index].name()));
@@ -996,7 +996,7 @@ void EagerReducer::ProcessUnusedDenseVars() {
// NOTE(haohongxiang): Calling SetFakeEmpty here is to make sure that
// gradient accumulation can continue normally after clear_gradients()
-      // especiall in cases including complex control flow.
+      // especially in cases including complex control flow.
std::static_pointer_cast<egr::GradNodeAccumulation>(
GetGradNodeFromTensor(&tensors_[var_index]))
->SetFakeEmpty(false);
......
@@ -192,7 +192,7 @@ void ComputeInterceptor::RunOps() {
microbatch_scopes_.size(),
platform::errors::InvalidArgument(
"Step out of range. There are %ld "
"microbatch_scopes, but recevice scope index %ld",
"microbatch_scopes, but receive scope index %ld",
microbatch_scopes_.size(),
cur_scope_id_));
}
......
@@ -52,7 +52,7 @@ int TreeIndex::Load(const std::string filename) {
platform::errors::InvalidArgument(
"Read from file: %s failed. Valid Format is "
"an integer representing the length of the following string, "
"and the string itself.We got an iteger[% d], "
"and the string itself.We got an integer[% d], "
"but the following string's length is [%d].",
filename,
num,
@@ -75,7 +75,7 @@ int TreeIndex::Load(const std::string filename) {
// PADDLE_ENFORCE_NE(node.id(), 0,
// platform::errors::InvalidArgument(
// "Node'id should not be equel to zero."));
// "Node'id should not be equal to zero."));
if (node.is_leaf()) {
id_codes_map_[node.id()] = code;
}
......
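The InvalidArgument message in the TreeIndex::Load hunks above documents the on-disk layout: each record is an integer holding the byte length of the string that follows, then the string bytes themselves, and the error fires when the two disagree. A hedged sketch of reading one such length-prefixed record; the field width and error handling here are illustrative assumptions, not Paddle's actual implementation:

#include <cstdio>
#include <string>
#include <vector>

// Read one <length, bytes> record; return false on EOF or when fewer
// bytes than the declared length remain, the mismatch the error reports.
bool ReadRecord(std::FILE* fp, std::string* out) {
  int num = 0;
  if (std::fread(&num, sizeof(num), 1, fp) != 1 || num < 0) return false;
  std::vector<char> buf(static_cast<size_t>(num));
  if (num > 0 && std::fread(buf.data(), 1, buf.size(), fp) != buf.size()) {
    return false;
  }
  out->assign(buf.begin(), buf.end());
  return true;
}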
@@ -81,7 +81,7 @@ message ServerServiceParameter {
optional string server_class = 1 [ default = "DownpourBrpcPsServer" ];
optional string client_class = 2 [ default = "DownpourBrpcPsClient" ];
optional string service_class = 3 [ default = "DownpourPsService"];
-  optional uint32 start_server_port = 4 [ default = 0 ]; //will find a avaliable port from it
+  optional uint32 start_server_port = 4 [ default = 0 ]; //will find a available port from it
optional uint32 server_thread_num = 5 [ default = 12 ];
}
......
@@ -174,7 +174,7 @@ void Communicator::RpcSendDenseParam(const std::vector<std::string> &varnames,
float *w = tensor->mutable_data<float>(place);
paddle::distributed::Region reg(w, tensor->numel());
regions.emplace_back(reg);
VLOG(1) << "rpc_send_dense_param Var " << t << " talbe_id " << table_id
VLOG(1) << "rpc_send_dense_param Var " << t << " table_id " << table_id
<< " Temp_data[0] " << w[0] << " Temp_data[-1] "
<< w[tensor->numel() - 1];
}
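The RpcSendDenseParam hunk above batches non-owning (pointer, element-count) views over each parameter tensor before pushing them to the parameter server. A minimal sketch of that view-collection idea, where Region is a hypothetical stand-in for paddle::distributed::Region:

#include <cstdint>
#include <utility>
#include <vector>

// Hypothetical stand-in for paddle::distributed::Region: a non-owning
// view over a dense parameter buffer.
struct Region {
  float* data;
  int64_t numel;
};

// One view per (buffer, length) pair; nothing is copied, so the RPC
// layer reads directly from the training buffers, as in the hunk.
std::vector<Region> MakeRegions(
    const std::vector<std::pair<float*, int64_t>>& params) {
  std::vector<Region> regions;
  regions.reserve(params.size());
  for (const auto& p : params) {
    regions.push_back(Region{p.first, p.second});
  }
  return regions;
}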
@@ -1514,7 +1514,7 @@ void FLCommunicator::InitBrpcClient(
if (_worker_ptr.get() == nullptr) {
VLOG(0) << "fl-ps > FLCommunicator::InitBrpcClient get _worker_ptr";
_worker_ptr =
-        fleet->worker_ptr_; // FleetWrapper::InitWorker must be excuted
+        fleet->worker_ptr_; // FleetWrapper::InitWorker must be executed
// before, but no need for Coordinator
}
if (coordinator_client_ptr_ == nullptr) {
......
@@ -277,7 +277,7 @@ class Communicator {
virtual void RpcRecvSparse(const std::string &varname,
int table_id,
Scope *scope);
-  // 7. send gloabl step
+  // 7. send global step
virtual void SendGlobalStep(const CommContext &ctx,
int batches,
Scope *send_scope);
@@ -572,7 +572,7 @@ class SyncCommunicator : public HalfAsyncCommunicator {
: HalfAsyncCommunicator(envs) {}
void InitEnvs() {
-    // enfore to recv after send
+    // enforce to recv after send
independent_recv_ = false;
min_send_grad_num_before_recv_ = 0;
max_merge_var_num_ = std::stoi(envs.at("communicator_max_merge_var_num"));
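The SyncCommunicator::InitEnvs hunk above configures synchronous training from a plain string map: independent receiving is disabled so every recv happens after a send, and numeric knobs are parsed with std::stoi(envs.at(...)). A small sketch of that parsing idiom; SyncConfig and its field set are assumptions for illustration, not Paddle's types:

#include <map>
#include <string>

// Hypothetical bundle of the settings the hunk assigns.
struct SyncConfig {
  bool independent_recv = false;  // sync mode: recv only after send
  int min_send_grad_num_before_recv = 0;
  int max_merge_var_num = 1;
};

SyncConfig ParseEnvs(const std::map<std::string, std::string>& envs) {
  SyncConfig cfg;
  // envs.at() throws std::out_of_range on a missing key, so a
  // misconfigured launcher fails fast instead of silently defaulting.
  cfg.max_merge_var_num = std::stoi(envs.at("communicator_max_merge_var_num"));
  return cfg;
}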
......