From 771c3b297f7788492a06ccfc6c16777bb885c2ea Mon Sep 17 00:00:00 2001
From: Liufang Sang
Date: Sat, 18 Apr 2020 08:57:31 -0500
Subject: [PATCH] update enhance error message for Initializer, smooth_l1
 (#23912)

* update enhance error message test=develop

* update details test=develop
---
 paddle/fluid/operators/gaussian_random_op.cc | 10 +++--
 paddle/fluid/operators/smooth_l1_loss_op.cc  | 42 +++++++++++++-------
 2 files changed, 33 insertions(+), 19 deletions(-)

diff --git a/paddle/fluid/operators/gaussian_random_op.cc b/paddle/fluid/operators/gaussian_random_op.cc
index 6357df94eb0..e99da6e3412 100644
--- a/paddle/fluid/operators/gaussian_random_op.cc
+++ b/paddle/fluid/operators/gaussian_random_op.cc
@@ -58,10 +58,12 @@ class GaussianRandomOp : public framework::OperatorWithKernel {
     for (auto dim : shape) {
       temp.push_back(static_cast<int64_t>(dim));
     }
-    PADDLE_ENFORCE_GT(shape.size(), 0UL,
-                      platform::errors::InvalidArgument(
-                          "Attribute(shape) of GaussianRandomOp must be set "
-                          "and shape.size() > 0."));
+    PADDLE_ENFORCE_GT(
+        shape.size(), 0UL,
+        platform::errors::InvalidArgument(
+            "Attribute(shape) of GaussianRandomOp must be set "
+            "and shape.size() > 0, but received shape.size() is %d",
+            shape.size()));
     ctx->SetOutputDim("Out", framework::make_ddim(temp));
   }
 
diff --git a/paddle/fluid/operators/smooth_l1_loss_op.cc b/paddle/fluid/operators/smooth_l1_loss_op.cc
index 59de629a687..ea11529c905 100644
--- a/paddle/fluid/operators/smooth_l1_loss_op.cc
+++ b/paddle/fluid/operators/smooth_l1_loss_op.cc
@@ -34,15 +34,18 @@ class SmoothL1LossOp : public framework::OperatorWithKernel {
       check = false;
     }
     if (check) {
-      PADDLE_ENFORCE_EQ(x_dims, y_dims,
-                        platform::errors::InvalidArgument(
-                            "Input(X) ans Input(Y) of SmoothL1LossOp should "
-                            "have the same size"));
+      PADDLE_ENFORCE_EQ(
+          x_dims, y_dims,
+          platform::errors::InvalidArgument(
+              "Input(X) and Input(Y) of SmoothL1LossOp should "
+              "have the same size, but received X dim is %s, Y dim is %s",
+              x_dims.to_str(), y_dims.to_str()));
     }
     PADDLE_ENFORCE_GE(x_dims.size(), 2,
                       platform::errors::InvalidArgument(
                           "The tensor rank of Input(X) of SmoothL1LossOp "
-                          "should not be less than 2."));
+                          "should not be less than 2, but received %d.",
+                          x_dims.size()));
     if (ctx->HasInput("InsideWeight")) {
       PADDLE_ENFORCE_EQ(ctx->HasInput("OutsideWeight"), true,
                         platform::errors::InvalidArgument(
@@ -58,7 +61,9 @@ class SmoothL1LossOp : public framework::OperatorWithKernel {
         PADDLE_ENFORCE_EQ(x_dims, dims,
                           platform::errors::InvalidArgument(
                               "Input(X) ans Input(InsideWeight) of "
-                              "SmoothL1LossOp should have the same size"));
+                              "SmoothL1LossOp should have the same size, but "
+                              "received X dim is %s, InsideWeight dim is %s",
+                              x_dims.to_str(), dims.to_str()));
       }
 
       dims = ctx->GetInputDim("OutsideWeight");
@@ -71,7 +76,9 @@ class SmoothL1LossOp : public framework::OperatorWithKernel {
         PADDLE_ENFORCE_EQ(x_dims, dims,
                           platform::errors::InvalidArgument(
                               "Input(X) ans Input(OutsideWeight) of "
-                              "SmoothL1LossOp should have the same size"));
+                              "SmoothL1LossOp should have the same size, but "
+                              "received X dim is %s, OutsideWeight dim is %s",
+                              x_dims.to_str(), dims.to_str()));
       }
     }
 
@@ -145,18 +152,23 @@ class SmoothL1LossGradOp : public framework::OperatorWithKernel {
     auto in_dims = ctx->GetInputDim("Diff");
     auto out_dims = ctx->GetInputDim(framework::GradVarName("Out"));
 
-    PADDLE_ENFORCE_GE(out_dims.size(), 2,
-                      platform::errors::InvalidArgument(
-                          "The tensor rank of Input(Out@Grad) should be 2."));
+    PADDLE_ENFORCE_GE(
+        out_dims.size(), 2,
+        platform::errors::InvalidArgument(
+            "The tensor rank of Input(Out@Grad) should be 2, but received %d.",
+            out_dims.size()));
     if (ctx->IsRuntime()) {
-      PADDLE_ENFORCE_EQ(out_dims[0], in_dims[0],
-                        platform::errors::InvalidArgument(
-                            "The 1st dimension of Input(Out@Grad) must be "
-                            "same as input in SmoothL1LossGradOp."));
+      PADDLE_ENFORCE_EQ(
+          out_dims[0], in_dims[0],
+          platform::errors::InvalidArgument(
+              "The 1st dimension of Input(Out@Grad) must be "
+              "same as input in SmoothL1LossGradOp, but received %d and %d.",
+              out_dims[0], in_dims[0]));
       PADDLE_ENFORCE_EQ(out_dims[1], 1,
                         platform::errors::InvalidArgument(
                             "The 2nd dimension of Input(Out@Grad) must be 1 in "
                             "SmoothL1LossGradOp, but received %d.",
+                            out_dims[1]));
     }
 
     auto x_grad_name = framework::GradVarName("X");
--
GitLab
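
The common pattern in this patch is to extend each PADDLE_ENFORCE_* check so that its platform::errors::InvalidArgument message reports the values that were actually received (via printf-style placeholders and DDim::to_str()), not just the requirement that was violated. The following standalone C++ sketch illustrates that pattern outside of Paddle; it is not Paddle's implementation, and the names DimsToStr and CheckSameDims are hypothetical helpers invented for this example, with std::invalid_argument standing in for the PADDLE_ENFORCE machinery.

// Standalone sketch of "include the received values in the error message".
// Not Paddle code: DimsToStr and CheckSameDims are illustrative names only.
#include <cstdint>
#include <cstdio>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>

// Render a dim vector as "[4, 2]", similar in spirit to DDim::to_str()
// used by the patched error messages.
std::string DimsToStr(const std::vector<int64_t>& dims) {
  std::ostringstream os;
  os << "[";
  for (size_t i = 0; i < dims.size(); ++i) {
    if (i > 0) os << ", ";
    os << dims[i];
  }
  os << "]";
  return os.str();
}

// Enforce that two inputs have identical dims; on failure, the message
// names both operands and embeds the shapes that were actually received.
void CheckSameDims(const std::string& lhs_name, const std::vector<int64_t>& lhs,
                   const std::string& rhs_name, const std::vector<int64_t>& rhs) {
  if (lhs != rhs) {
    throw std::invalid_argument(
        "Input(" + lhs_name + ") and Input(" + rhs_name +
        ") should have the same size, but received " + lhs_name + " dim is " +
        DimsToStr(lhs) + ", " + rhs_name + " dim is " + DimsToStr(rhs));
  }
}

int main() {
  try {
    CheckSameDims("X", {4, 1}, "Y", {4, 2});  // mismatched on purpose
  } catch (const std::invalid_argument& e) {
    // Prints a message analogous to the enriched ones added by the patch.
    std::printf("%s\n", e.what());
  }
  return 0;
}

The payoff of this style is that a failed InferShape check can be diagnosed from the log line alone, since the offending shapes or ranks are part of the exception text rather than requiring a debugger to inspect them.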