Commit 756af4e7 authored by Yibing Liu

regulate comments in margin_rank_loss_op

Parent 6b3e9ccb
@@ -45,8 +45,8 @@ class MarginRankLossOpMaker : public framework::OpProtoAndCheckerMaker {
MarginRankLossOpMaker(framework::OpProto *proto,
framework::OpAttrChecker *op_checker)
: OpProtoAndCheckerMaker(proto, op_checker) {
AddInput("X1", "The first input of MarginRankLossOp, row vector.");
AddInput("X2", "The second input of MarginRankLossOp, row vector.");
AddInput("X1", "The first variable to be ranked, row vector.");
AddInput("X2", "The second variable to be ranked, row vector.");
AddInput("Label",
"The label indicating X1 ranked higher than X2 "
"or not, row vector.");
@@ -54,16 +54,16 @@ class MarginRankLossOpMaker : public framework::OpProtoAndCheckerMaker {
.SetDefault(0);
AddOutput("Activated",
"Intermediate tensor to indicate whether each element of "
"Output(Out) is activated")
"Output(Out) is activated.")
.AsIntermediate();
AddOutput("Out", "The output loss of MarginRankLoss operator");
AddComment(R"DOC(
MarginRankLoss operator measures the loss given a pair of input {`X1`, `X2`}
and `Label` with attribuute `margin`, where `Label == 1` indicating X1 is
ranked higher than `X2`, otherwise `Label == -1`. The loss turns out
and the `Label` with attribute `margin`, where `Label = 1` indicating X1 is
ranked higher than `X2`, otherwise `Label = -1`. The loss turns out
loss(X1, X2, Label) = max(0, -Label * (X1-X2) + margin)
loss(X1, X2, Label) = max(0, -Label * (X1 - X2) + margin)
For batch input, `X1`, `X2` and `Label` all have the same size batch_size x 1.
......
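The formula in the updated operator comment can be checked with a short NumPy sketch. The helper name `margin_rank_loss`, the example values, and the reading of `Activated` as "this loss element is non-zero" are illustrative assumptions made here, not taken from the operator's implementation:

```python
import numpy as np

def margin_rank_loss(x1, x2, label, margin=0.0):
    # loss(X1, X2, Label) = max(0, -Label * (X1 - X2) + margin), element-wise
    loss = np.maximum(0.0, -label * (x1 - x2) + margin)
    # Assumed meaning of "Activated": which elements contribute a non-zero loss
    activated = (loss > 0).astype("float32")
    return loss, activated

# batch_size x 1 inputs, as the comment describes
x1 = np.array([[0.2], [0.9]], dtype="float32")
x2 = np.array([[0.1], [0.3]], dtype="float32")
label = np.array([[1.0], [-1.0]], dtype="float32")  # X1 ranked higher / lower than X2
loss, act = margin_rank_loss(x1, x2, label, margin=0.5)
print(loss.ravel())  # [0.4 1.1]
print(act.ravel())   # [1. 1.]
```

With `Label = -1` the second row is penalized because `X1 > X2` contradicts the label, which is exactly the hinge behaviour the comment describes.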
@@ -7,7 +7,7 @@ class TestMarginRankLossOp(OpTest):
def setUp(self):
self.op_type = "margin_rank_loss"
batch_size = 5
margin = 0.1
margin = 0.5
# labels_{i} = {-1, 1}
label = 2 * np.random.randint(
0, 2, size=(batch_size, 1)).astype("float32") - 1
......
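As a small aside on the label construction in the test hunk above, `2 * np.random.randint(0, 2, ...) - 1` maps draws from {0, 1} onto {-1, 1}, matching the `# labels_{i} = {-1, 1}` comment. A standalone check (the seed and batch size are arbitrary choices for illustration only):

```python
import numpy as np

np.random.seed(0)  # arbitrary seed, only to make the illustration reproducible
batch_size = 5
label = 2 * np.random.randint(0, 2, size=(batch_size, 1)).astype("float32") - 1
assert np.isin(label, [-1.0, 1.0]).all()
print(label.ravel())
```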