From c4787d7638a3ebde268c96c630cd457177968b31 Mon Sep 17 00:00:00 2001
From: Kqnonrime <36952116+Kqnonrime@users.noreply.github.com>
Date: Fri, 14 May 2021 17:29:10 +0800
Subject: [PATCH] Fix four error messages (#32899)

* fix two error message
* fix two error message
* fix error
* fix error
* fix error
* fix error
* fix some error message
* fix some error
* fix error
* fix some error
* fix some error
* fix some error
* fix one error
* fix some error
* fix seven error message
* fix error
* fix error
* fix error
* fix error
* fix some error message
* fix error
* fix some error
* fix some error
* fix four error message
* fix error
* fix error
---
 .../operators/fused/fused_bn_activation_op.cc      |  4 +++-
 .../fused/fused_embedding_eltwise_layernorm_op.cc  | 15 +++++++++------
 .../fused/fusion_transpose_flatten_concat_op.cc    |  4 +++-
 3 files changed, 15 insertions(+), 8 deletions(-)

diff --git a/paddle/fluid/operators/fused/fused_bn_activation_op.cc b/paddle/fluid/operators/fused/fused_bn_activation_op.cc
index 97cd4d90be6..e9ad2895e03 100644
--- a/paddle/fluid/operators/fused/fused_bn_activation_op.cc
+++ b/paddle/fluid/operators/fused/fused_bn_activation_op.cc
@@ -173,7 +173,9 @@ void FusedBatchNormActOpMaker::Make() {
       .AddCustomChecker([](const float &epsilon) {
         PADDLE_ENFORCE_EQ(epsilon >= 0.0f && epsilon <= 0.001f, true,
                           platform::errors::InvalidArgument(
-                              "'epsilon' should be between 0.0 and 0.001."));
+                              "Attr(epsilon) should be between 0.0 and 0.001, "
+                              "but received value is %f.",
+                              epsilon));
       });
   AddAttr<std::string>("act_type", "The activation type to be fused.")
       .SetDefault("relu");
diff --git a/paddle/fluid/operators/fused/fused_embedding_eltwise_layernorm_op.cc b/paddle/fluid/operators/fused/fused_embedding_eltwise_layernorm_op.cc
index b53b407d499..4d270280d38 100644
--- a/paddle/fluid/operators/fused/fused_embedding_eltwise_layernorm_op.cc
+++ b/paddle/fluid/operators/fused/fused_embedding_eltwise_layernorm_op.cc
@@ -25,11 +25,13 @@ class EmbeddingEltWiseLayerNormOp : public framework::OperatorWithKernel {
 
  protected:
   void InferShape(framework::InferShapeContext* context) const override {
-    PADDLE_ENFORCE_EQ(context->Inputs("Ids").size(),
-                      context->Inputs("Embs").size(),
-                      platform::errors::InvalidArgument(
-                          "Two inputs of EmbeddingEltWiseLayerNormOp shoube be "
-                          "the same size"));
+    PADDLE_ENFORCE_EQ(
+        context->Inputs("Ids").size(), context->Inputs("Embs").size(),
+        platform::errors::InvalidArgument(
+            "Two inputs of EmbeddingEltWiseLayerNormOp should be "
+            "the same size, but received the size of input Ids = %d,"
+            " the size of input Embs = %d.",
+            context->Inputs("Ids").size(), context->Inputs("Embs").size()));
     PADDLE_ENFORCE_GE(context->Inputs("Embs").size(), 2UL,
                       platform::errors::InvalidArgument(
                           "Input Embs of EmbeddingEltWiseLayerNormOp should "
@@ -77,7 +79,8 @@ class EmbeddingEltWiseLayerNormOp : public framework::OperatorWithKernel {
       PADDLE_ENFORCE_EQ(
           embs_dims[i][1], hidden,
           platform::errors::InvalidArgument(
-              "The Emb first dim size(%d) shoule equal to hidden (%d).",
+              "The second dimension size(%d) of the Embedding should be "
+              "equal to the hidden size(%d).",
               embs_dims[i][1], hidden));
     }
 
diff --git a/paddle/fluid/operators/fused/fusion_transpose_flatten_concat_op.cc b/paddle/fluid/operators/fused/fusion_transpose_flatten_concat_op.cc
index bd376b1e7aa..382d01f6a53 100644
--- a/paddle/fluid/operators/fused/fusion_transpose_flatten_concat_op.cc
+++ b/paddle/fluid/operators/fused/fusion_transpose_flatten_concat_op.cc
@@ -40,7 +40,9 @@ class TransposeFlattenConcatFusionOp : public framework::OperatorWithKernel {
     const size_t n = ins.size();
     PADDLE_ENFORCE_GT(n, 0,
                       platform::errors::InvalidArgument(
-                          "Input tensors dim size should greater than 0."));
+                          "The size of Inputs(X) should be greater "
+                          "than 0, but received %d.",
+                          n));
     std::vector<int> trans_axis =
         ctx->Attrs().Get<std::vector<int>>("trans_axis");
-- 
GitLab
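
For context on the pattern every hunk in this patch touches: PADDLE_ENFORCE_* takes a predicate, an error constructor such as platform::errors::InvalidArgument, a printf-style message template, and the values to substitute into it, which is what lets the patched messages report the received values instead of only restating the constraint. The standalone C++ sketch below imitates that shape; CheckEq and FormatMessage are hypothetical stand-ins invented for illustration, not Paddle's real implementation (the real macros live in paddle/fluid/platform/enforce.h and use a type-safe formatter, so the patch can pass size_t values to %d, whereas the raw snprintf used here needs %zu).

// Standalone sketch of the enforce pattern used in the hunks above.
// CheckEq and FormatMessage are hypothetical stand-ins for
// PADDLE_ENFORCE_EQ / platform::errors::InvalidArgument.
#include <cstddef>
#include <cstdio>
#include <stdexcept>
#include <string>

// Format a printf-style template plus its trailing arguments into the
// final diagnostic string. Raw snprintf requires %zu for size_t values.
template <typename... Args>
std::string FormatMessage(const char* fmt, Args... args) {
  char buf[512];
  std::snprintf(buf, sizeof(buf), fmt, args...);
  return std::string(buf);
}

// Throw when lhs != rhs, carrying the formatted message so the caller
// sees the actual received values, as the patched messages now do.
template <typename T, typename... Args>
void CheckEq(const T& lhs, const T& rhs, const char* fmt, Args... args) {
  if (!(lhs == rhs)) {
    throw std::invalid_argument(FormatMessage(fmt, args...));
  }
}

int main() {
  std::size_t ids_size = 3, embs_size = 2;  // mismatched on purpose
  try {
    CheckEq(ids_size, embs_size,
            "Two inputs of EmbeddingEltWiseLayerNormOp should be "
            "the same size, but received the size of input Ids = %zu,"
            " the size of input Embs = %zu.",
            ids_size, embs_size);
  } catch (const std::invalid_argument& e) {
    std::printf("%s\n", e.what());  // prints the enriched diagnostic
  }
  return 0;
}

Running the sketch prints the mismatch with the offending values (Ids = 3, Embs = 2), which is the improvement this patch makes over messages like the bare "the same size".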