From 756af4e73a0c1290052e8e2542b8ebc0ad6c5074 Mon Sep 17 00:00:00 2001 From: Yibing Liu Date: Fri, 22 Sep 2017 10:30:33 +0800 Subject: [PATCH] regulate comments in margin_rank_loss_op --- paddle/operators/margin_rank_loss_op.cc | 12 ++++++------ .../v2/framework/tests/test_margin_rank_loss_op.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/paddle/operators/margin_rank_loss_op.cc b/paddle/operators/margin_rank_loss_op.cc index 6869cedc8..47faaf716 100644 --- a/paddle/operators/margin_rank_loss_op.cc +++ b/paddle/operators/margin_rank_loss_op.cc @@ -45,8 +45,8 @@ class MarginRankLossOpMaker : public framework::OpProtoAndCheckerMaker { MarginRankLossOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker) : OpProtoAndCheckerMaker(proto, op_checker) { - AddInput("X1", "The first input of MarginRankLossOp, row vector."); - AddInput("X2", "The second input of MarginRankLossOp, row vector."); + AddInput("X1", "The first variable to be ranked, row vector."); + AddInput("X2", "The second variable to be ranked, row vector."); AddInput("Label", "The label indicating X1 ranked higher than X2 " "or not, row vector."); @@ -54,16 +54,16 @@ class MarginRankLossOpMaker : public framework::OpProtoAndCheckerMaker { .SetDefault(0); AddOutput("Activated", "Intermediate tensor to indicate whether each element of " - "Output(Out) is activated") + "Output(Out) is activated.") .AsIntermediate(); AddOutput("Out", "The output loss of MarginRankLoss operator"); AddComment(R"DOC( MarginRankLoss operator measures the loss given a pair of input {`X1`, `X2`} -and `Label` with attribuute `margin`, where `Label == 1` indicating X1 is -ranked higher than `X2`, otherwise `Label == -1`. The loss turns out +and the `Label` with attribute `margin`, where `Label = 1` indicates that `X1` is +ranked higher than `X2`, otherwise `Label = -1`. 
The loss turns out -loss(X1, X2, Label) = max(0, -Label * (X1-X2) + margin) +loss(X1, X2, Label) = max(0, -Label * (X1 - X2) + margin) For batch input, `X1`, `X2` and `Label` all have the same size batch_size x 1. diff --git a/python/paddle/v2/framework/tests/test_margin_rank_loss_op.py b/python/paddle/v2/framework/tests/test_margin_rank_loss_op.py index 2eb960534..63378cbc4 100644 --- a/python/paddle/v2/framework/tests/test_margin_rank_loss_op.py +++ b/python/paddle/v2/framework/tests/test_margin_rank_loss_op.py @@ -7,7 +7,7 @@ class TestMarginRankLossOp(OpTest): def setUp(self): self.op_type = "margin_rank_loss" batch_size = 5 - margin = 0.1 + margin = 0.5 # labels_{i} = {-1, 1} label = 2 * np.random.randint( 0, 2, size=(batch_size, 1)).astype("float32") - 1 -- GitLab