From 40aee48a8077ba2757d8c7905d9744e2b425bca4 Mon Sep 17 00:00:00 2001
From: caoying03
Date: Tue, 26 Sep 2017 18:34:43 +0800
Subject: [PATCH] follow comments.

---
 .../softmax_with_cross_entropy_op.cc          | 28 ++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/paddle/operators/softmax_with_cross_entropy_op.cc b/paddle/operators/softmax_with_cross_entropy_op.cc
index cb2aa3005..b6f33ad9e 100644
--- a/paddle/operators/softmax_with_cross_entropy_op.cc
+++ b/paddle/operators/softmax_with_cross_entropy_op.cc
@@ -23,11 +23,6 @@ class SoftmaxWithCrossEntropyOpMaker
   SoftmaxWithCrossEntropyOpMaker(framework::OpProto* proto,
                                  framework::OpAttrChecker* op_checker)
       : OpProtoAndCheckerMaker(proto, op_checker) {
-    AddAttr<bool>(
-        "softLabel",
-        "(bool, default: false), A flag to indicate whether to interpretate "
-        "the given labels as soft labels.")
-        .SetDefault(false);
     AddInput("Logits",
              "(Tensor, default: Tensor<float>), The unscaled log probabilities "
              "which is a 2-D tensor with shape [N x K]. N is the batch_size, "
@@ -49,6 +44,11 @@ class SoftmaxWithCrossEntropyOpMaker
     AddOutput("Loss",
               "(Tensor, default: Tensor<float>), A 2-D tensor. The cross "
               "entropy loss with shape [N x 1].");
+    AddAttr<bool>(
+        "softLabel",
+        "(bool, default: false), A flag to indicate whether to interpretate "
+        "the given labels as soft labels.")
+        .SetDefault(false);
     AddComment(R"DOC(
Cross entropy loss with softmax are used as the output layer extensively. This
operator computes the softmax normalized values for each row of the input
@@ -95,18 +95,18 @@ class SoftmaxWithCrossEntropyOp : public framework::OperatorWithKernel {
     const Tensor* logits = ctx.Input<Tensor>("Logits");
     const Tensor* labels = ctx.Input<Tensor>("Label");
-    PADDLE_ENFORCE(
-        logits->dims().size() == 2UL,
+    PADDLE_ENFORCE_EQ(
+        logits->dims().size(), 2UL,
         "The input of softmax_with_cross_entropy should be a 2-D tensor.");
-    PADDLE_ENFORCE(ctx.Input<Tensor>("Label")->dims().size() == 2UL,
-                   "The labels should be a 2-D tensor.");
+    PADDLE_ENFORCE_EQ(ctx.Input<Tensor>("Label")->dims().size(), 2UL,
+                      "The labels should be a 2-D tensor.");

     if (ctx.Attr<bool>("softLabel")) {
       PADDLE_ENFORCE_EQ(logits->dims()[1], labels->dims()[1],
                         "If Attr(softLabel) == true, the 2nd dimension of "
                         "Input(X) and Input(Label) should be equal.");
     } else {
-      PADDLE_ENFORCE_EQ(labels->dims()[1], 1,
+      PADDLE_ENFORCE_EQ(labels->dims()[1], 1UL,
                         "If Attr(softLabel) == false, the 2nd dimension of "
                         "Input(Label) should be 1.");
     }
@@ -130,21 +130,21 @@ class SoftmaxWithCrossEntropyOpGrad : public framework::OperatorWithKernel {
     PADDLE_ENFORCE_NOT_NULL(ctx.InputVar("Softmax"),
                             "Input(Softmax) should be not null.");
     PADDLE_ENFORCE_NOT_NULL(ctx.InputVar("Label"),
-                            "Input(Lable) should be not null.");
+                            "Input(Label) should be not null.");
     PADDLE_ENFORCE_NOT_NULL(ctx.OutputVar(framework::GradVarName("Logits")),
                             "Output(Logits@Grad) should be not null.");

     const Tensor* softmax = ctx.Input<Tensor>("Softmax");
     const Tensor* labels = ctx.Input<Tensor>("Label");
-    PADDLE_ENFORCE(ctx.Input<Tensor>("Label")->dims().size() == 2UL,
-                   "The labels should be a 2-D tensor.");
+    PADDLE_ENFORCE_EQ(ctx.Input<Tensor>("Label")->dims().size(), 2UL,
+                      "The labels should be a 2-D tensor.");

     if (ctx.Attr<bool>("softLabel")) {
       PADDLE_ENFORCE_EQ(softmax->dims()[1], labels->dims()[1],
                         "When Attr(softLabel) == true, the 2nd dimension of "
                         "Input(X) and Input(Label) should be equal.");
     } else {
-      PADDLE_ENFORCE_EQ(labels->dims()[1], 1,
+      PADDLE_ENFORCE_EQ(labels->dims()[1], 1UL,
                         "When Attr(softLabel) == false, the 2nd dimension of "
                         "Input(Label) should be 1.");
     }
--
GitLab
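
A note for readers on the checks being tightened above: PADDLE_ENFORCE_EQ(a, b, msg)
asserts a == b and can report both operands on failure, which is why the patch prefers
it over the hand-rolled PADDLE_ENFORCE(a == b, msg), and the 1UL literal keeps the
comparison against dims()[1] unsigned. The following is a minimal, framework-free
sketch of the label-shape contract these two validators enforce; enforce_eq and
check_label_shape are hypothetical stand-ins invented for illustration, not Paddle APIs.

// Stand-alone sketch of the shape checks in SoftmaxWithCrossEntropyOp::InferShape,
// assuming the simplified helpers below instead of the real Paddle framework.
#include <cstddef>
#include <sstream>
#include <stdexcept>
#include <vector>

// Hypothetical stand-in for PADDLE_ENFORCE_EQ: checks equality and, unlike a
// plain boolean assert, reports both operands in the error message.
template <typename A, typename B>
void enforce_eq(const A& a, const B& b, const char* msg) {
  if (!(a == b)) {
    std::ostringstream os;
    os << msg << " (got " << a << " vs. " << b << ")";
    throw std::runtime_error(os.str());
  }
}

// Logits are [N x K]; soft labels must also be [N x K] (a distribution per
// row), while hard labels must be [N x 1] (one class index per row).
void check_label_shape(const std::vector<std::size_t>& logits_dims,
                       const std::vector<std::size_t>& label_dims,
                       bool soft_label) {
  enforce_eq(logits_dims.size(), 2UL,
             "The input of softmax_with_cross_entropy should be a 2-D tensor.");
  enforce_eq(label_dims.size(), 2UL, "The labels should be a 2-D tensor.");
  if (soft_label) {
    enforce_eq(logits_dims[1], label_dims[1],
               "If Attr(softLabel) == true, the 2nd dimension of "
               "Input(X) and Input(Label) should be equal.");
  } else {
    enforce_eq(label_dims[1], 1UL,
               "If Attr(softLabel) == false, the 2nd dimension of "
               "Input(Label) should be 1.");
  }
}

int main() {
  check_label_shape({32, 10}, {32, 10}, /*soft_label=*/true);   // OK
  check_label_shape({32, 10}, {32, 1},  /*soft_label=*/false);  // OK
  // check_label_shape({32, 10}, {32, 10}, false);  // would throw
  return 0;
}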