Unverified commit b7eeef24, authored by kavyasrinet, committed by GitHub

Updating the comments for send_op and recv_op. (#7747)

* Updating the cluster training doc

* Fixed comments

* Updating few comments in recv_op
Parent 6ae46a29
@@ -49,7 +49,7 @@ static void CreateTensorFromMessageType(framework::Variable *var,
var->GetMutable<framework::SelectedRows>();
} else {
PADDLE_THROW(
"VraibleMessage type %d is not in "
"VariableMessage type %d is not in "
"[LoDTensor, SelectedRows]",
var_type);
}
@@ -121,17 +121,17 @@ class RecvOp : public framework::OperatorBase {
if (it != grad_list.end()) {
param_var_name = param_list[it - grad_list.begin()];
} else {
LOG(ERROR) << "grad have no paired param:" << grad_var_name;
LOG(ERROR) << "grad has no paired param:" << grad_var_name;
}
VLOG(3) << "recved grad: " << grad_var_name
VLOG(3) << "received grad: " << grad_var_name
<< " updating param: " << param_var_name;
if (fan_in > 1) {
grad_var_name = this->GetGradVarNameForTrainer(grad_var_name);
}
auto *var = recv_scope.FindVar(grad_var_name);
if (var == nullptr) {
LOG(ERROR) << "can not find server side var: " << grad_var_name;
PADDLE_THROW("can not find server side var");
LOG(ERROR) << "Can not find server side var: " << grad_var_name;
PADDLE_THROW("Can not find server side var");
}
detail::DeserializeFromMessage(v.second, dev_ctx, var);
}
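As a plain-Python illustration of the lookup in the hunk above: a received gradient name is matched to its parameter by position in the two parallel lists (ParamList/GradList), and when more than one trainer pushes the same gradient (fan_in > 1) the server-side variable gets a distinct per-trainer name. The helper names and the ".trainer_<i>" suffix below are assumptions for illustration, not taken from this diff.

```python
# Sketch of the grad -> param lookup performed by RecvOp::Run above.
# Helper names and the ".trainer_<i>" suffix are illustrative assumptions;
# the C++ code uses GetGradVarNameForTrainer internally.
def find_param_for_grad(grad_name, param_list, grad_list):
    # ParamList and GradList are index-aligned: grad_list[i] updates param_list[i].
    if grad_name in grad_list:
        return param_list[grad_list.index(grad_name)]
    print("grad has no paired param:", grad_name)  # mirrors the LOG(ERROR) branch
    return None

def grad_var_name_for_trainer(grad_name, trainer_id):
    # With fan_in > 1, each trainer's copy of the same gradient needs a
    # distinct server-side variable name (naming scheme assumed here).
    return "%s.trainer_%d" % (grad_name, trainer_id)
```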
@@ -165,7 +165,7 @@ class RecvOpMaker : public framework::OpProtoAndCheckerMaker {
AddComment(R"DOC(
Recv operator
- This operator will recv tensor from send_op
+ This operator will receive tensor from send_op
)DOC");
AddAttr<std::string>("endpoint",
"(string, default 127.0.0.1:6164)"
@@ -176,11 +176,11 @@ This operator will recv tensor from send_op
kOptimizeBlock, "Serialized ProgramDesc string for recv to run.");
AddAttr<std::vector<std::string>>(
"ParamList", "type list of string",
"grad->param name mapping to find which param to optimize.")
"grad->param name mapping to find which parameters to optimize.")
.SetDefault({});
AddAttr<std::vector<std::string>>(
"GradList", "type list of string",
"grad->param name mapping to find which param to optimize.")
"grad->param name mapping to find which parameters to optimize.")
.SetDefault({});
AddAttr<int>("Fanin", "type int",
"Number of trainers in the current cluster job")
......
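The attributes documented in the RecvOpMaker hunk above are normally filled in by the distribute transpiler when it builds the parameter-server program rather than written by hand. A minimal sketch of the attribute layout for one parameter server follows; the variable names, the "@GRAD" suffix, the endpoint value, and the trainer count are assumptions, and only the attribute keys come from the code above.

```python
# Illustrative attribute set for a recv op on one parameter server.
recv_op_attrs = {
    "endpoint": "127.0.0.1:6164",       # address this pserver listens on (default shown above)
    # kOptimizeBlock: serialized ProgramDesc string for the optimizer step
    # (the exact attribute key is the kOptimizeBlock constant, not shown in this diff)
    "ParamList": ["fc_0.w_0", "fc_0.b_0"],           # parameters this pserver owns (assumed names)
    "GradList": ["fc_0.w_0@GRAD", "fc_0.b_0@GRAD"],  # gradients, index-aligned with ParamList
    "Fanin": 2,                                      # number of trainers pushing gradients
}
```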
@@ -62,13 +62,13 @@ class SendOpMaker : public framework::OpProtoAndCheckerMaker {
public:
SendOpMaker(OpProto* proto, OpAttrChecker* op_checker)
: OpProtoAndCheckerMaker(proto, op_checker) {
AddInput("X", "(Tensor) Input tensor to be send").AsDuplicable();
AddOutput("Out", "(Tensor) Output tensor to get from server")
AddInput("X", "(Tensor) Input tensor to be sent").AsDuplicable();
AddOutput("Out", "(Tensor) Output tensor to be received from server")
.AsDuplicable();
AddComment(R"DOC(
Send operator
- This operator will send tensor to recv_op.
+ This operator will send tensor to recv_op at the parameter server.
)DOC");
AddAttr<std::vector<std::string>>("endpoints",
"(string vector, default 127.0.0.1:6164)"
......
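On the trainer side, the send op is the counterpart of the recv op above: it pushes gradient tensors to the listed endpoints and receives tensors back from the server (typically the updated parameters). A matching sketch is below; the variable names and the endpoint list are assumptions, while the keys "X", "Out", and "endpoints" come from the SendOpMaker code above.

```python
# Illustrative inputs/outputs/attributes for a send op on one trainer.
send_op_spec = {
    "X": ["fc_0.w_0@GRAD", "fc_0.b_0@GRAD"],   # gradients to push (duplicable input, assumed names)
    "Out": ["fc_0.w_0", "fc_0.b_0"],           # tensors received back from the server
    "endpoints": ["127.0.0.1:6164"],           # one entry per parameter server
}
```

The ParamList/GradList pairing on the recv side then determines which parameter each pushed gradient updates.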