提交 35b4d42a 编写于 作者: T typhoonzero

merge doc fixes

上级 e163fd30
...@@ -49,7 +49,7 @@ static void CreateTensorFromMessageType(framework::Variable *var, ...@@ -49,7 +49,7 @@ static void CreateTensorFromMessageType(framework::Variable *var,
var->GetMutable<framework::SelectedRows>(); var->GetMutable<framework::SelectedRows>();
} else { } else {
PADDLE_THROW( PADDLE_THROW(
"VraibleMessage type %d is not in " "VariableMessage type %d is not in "
"[LoDTensor, SelectedRows]", "[LoDTensor, SelectedRows]",
var_type); var_type);
} }
...@@ -121,17 +121,17 @@ class RecvOp : public framework::OperatorBase { ...@@ -121,17 +121,17 @@ class RecvOp : public framework::OperatorBase {
if (it != grad_list.end()) { if (it != grad_list.end()) {
param_var_name = param_list[it - grad_list.begin()]; param_var_name = param_list[it - grad_list.begin()];
} else { } else {
LOG(ERROR) << "grad have no paired param:" << grad_var_name; LOG(ERROR) << "grad has no paired param:" << grad_var_name;
} }
VLOG(3) << "recved grad: " << grad_var_name VLOG(3) << "received grad: " << grad_var_name
<< " updating param: " << param_var_name; << " updating param: " << param_var_name;
if (fan_in > 1) { if (fan_in > 1) {
grad_var_name = this->GetGradVarNameForTrainer(grad_var_name); grad_var_name = this->GetGradVarNameForTrainer(grad_var_name);
} }
auto *var = recv_scope.FindVar(grad_var_name); auto *var = recv_scope.FindVar(grad_var_name);
if (var == nullptr) { if (var == nullptr) {
LOG(ERROR) << "can not find server side var: " << grad_var_name; LOG(ERROR) << "Can not find server side var: " << grad_var_name;
PADDLE_THROW("can not find server side var"); PADDLE_THROW("Can not find server side var");
} }
detail::DeserializeFromMessage(v.second, dev_ctx, var); detail::DeserializeFromMessage(v.second, dev_ctx, var);
} }
...@@ -164,7 +164,7 @@ class RecvOpMaker : public framework::OpProtoAndCheckerMaker { ...@@ -164,7 +164,7 @@ class RecvOpMaker : public framework::OpProtoAndCheckerMaker {
AddComment(R"DOC( AddComment(R"DOC(
Recv operator Recv operator
This operator will recv tensor from send_op This operator will receive tensor from send_op
)DOC"); )DOC");
AddAttr<std::string>("endpoint", AddAttr<std::string>("endpoint",
"(string, default 127.0.0.1:6164)" "(string, default 127.0.0.1:6164)"
...@@ -175,11 +175,11 @@ This operator will recv tensor from send_op ...@@ -175,11 +175,11 @@ This operator will recv tensor from send_op
kOptimizeBlock, "Serialized ProgramDesc string for recv to run."); kOptimizeBlock, "Serialized ProgramDesc string for recv to run.");
AddAttr<std::vector<std::string>>( AddAttr<std::vector<std::string>>(
"ParamList", "type list of string", "ParamList", "type list of string",
"grad->param name mapping to find which param to optimize.") "grad->param name mapping to find which parameters to optimize.")
.SetDefault({}); .SetDefault({});
AddAttr<std::vector<std::string>>( AddAttr<std::vector<std::string>>(
"GradList", "type list of string", "GradList", "type list of string",
"grad->param name mapping to find which param to optimize.") "grad->param name mapping to find which parameters to optimize.")
.SetDefault({}); .SetDefault({});
AddAttr<int>("Fanin", "type int", AddAttr<int>("Fanin", "type int",
"Number of trainers in the current cluster job") "Number of trainers in the current cluster job")
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册