diff --git a/paddle/fluid/operators/optimizers/lamb_op.h b/paddle/fluid/operators/optimizers/lamb_op.h
index b1e37e2b217504129c5086d7686449ce13893ea3..58192a2f2bc4e9edae70f8325bf1a23adfa12030 100644
--- a/paddle/fluid/operators/optimizers/lamb_op.h
+++ b/paddle/fluid/operators/optimizers/lamb_op.h
@@ -177,11 +177,12 @@ class LambOpKernel : public framework::OpKernel<T> {
  public:
   void Compute(const framework::ExecutionContext& ctx) const override {
     const auto* param_var = ctx.InputVar("Param");
-    PADDLE_ENFORCE(param_var->IsType<framework::LoDTensor>(),
-                   "The Var(%s)'s type should be LoDTensor, "
-                   "but the received is %s",
-                   ctx.InputNames("Param").front(),
-                   framework::ToTypeName(param_var->Type()));
+    PADDLE_ENFORCE_EQ(param_var->IsType<framework::LoDTensor>(), true,
+                      platform::errors::InvalidArgument(
+                          "The Var(%s)'s type should be LoDTensor, "
+                          "but the received is %s",
+                          ctx.InputNames("Param").front(),
+                          framework::ToTypeName(param_var->Type())));
 
     using paddle::framework::LoDTensor;
 
@@ -274,7 +275,10 @@ class LambOpKernel : public framework::OpKernel<T> {
           row_numel, grad_merge.rows().size());
       for_range(moment_update_functor);
     } else {
-      PADDLE_THROW("Variable type not supported by lamb_op.");
+      PADDLE_THROW(platform::errors::InvalidArgument(
+          "Variable type not supported by lamb_op. Expect LoDTensor or "
+          "SelectedRows, but got %s",
+          framework::ToTypeName(param_var->Type())));
     }
 
     // Update parameter
diff --git a/paddle/fluid/operators/unstack_op.cc b/paddle/fluid/operators/unstack_op.cc
index f86f7ee28b16830636570d29b767ef46e3fa342c..2f71f10a1c4177a0accba42f2e7f65a371c1439a 100644
--- a/paddle/fluid/operators/unstack_op.cc
+++ b/paddle/fluid/operators/unstack_op.cc
@@ -27,24 +27,35 @@ class UnStackOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext *ctx) const override {
-    PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, "Input(X) must exist.");
-
+    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "UnStack");
     int axis = ctx->Attrs().Get<int>("axis");
     int num = ctx->Attrs().Get<int>("num");
     auto x_dim = ctx->GetInputDim("X");
     int rank = x_dim.size();
-    PADDLE_ENFORCE_GE(
-        axis, -rank, "Attr(axis) must be inside [-rank, rank), where rank = %d",
-        rank);
-    PADDLE_ENFORCE_LT(
-        axis, rank, "Attr(axis) must be inside [-rank, rank), where rank = %d",
-        rank);
+    PADDLE_ENFORCE_GE(axis, -rank,
+                      platform::errors::InvalidArgument(
+                          "The attribute axis is out of range, it must be "
+                          "inside [-rank, rank), where rank = %d",
+                          rank));
+    PADDLE_ENFORCE_LT(axis, rank,
+                      platform::errors::InvalidArgument(
+                          "The attribute axis is out of range, it must be "
+                          "inside [-rank, rank), where rank = %d",
+                          rank));
     if (axis < 0) axis += rank;
 
     PADDLE_ENFORCE_EQ(ctx->Outputs("Y").size(), static_cast<size_t>(num),
-                      "Number of Outputs(Y) is wrong");
+                      platform::errors::InvalidArgument(
+                          "Number of Outputs(Y) is wrong. Got %d, but it must "
+                          "equal the attribute num, which is %d.",
+                          ctx->Outputs("Y").size(), static_cast<size_t>(num)));
     if (x_dim[axis] > 0) {
-      PADDLE_ENFORCE_EQ(num, x_dim[axis], "Number of Outputs(Y) is wrong");
+      PADDLE_ENFORCE_EQ(
+          num, x_dim[axis],
+          platform::errors::InvalidArgument(
+              "The attribute num is not equal to the length of the %d-th "
+              "axis of Input(X). Expect %d but got %d.",
+              axis, x_dim[axis], num));
     }
     auto vec = framework::vectorize(x_dim);
     vec.erase(vec.begin() + axis);
@@ -89,24 +100,29 @@ class UnStackGradOp : public framework::OperatorWithKernel {
 
   void InferShape(framework::InferShapeContext *ctx) const override {
     PADDLE_ENFORCE_GT(ctx->Inputs(framework::GradVarName("Y")).size(), 0,
-                      "Number of Inputs(Y@Grad) must be larger than 0");
-    PADDLE_ENFORCE_EQ(ctx->HasOutput(framework::GradVarName("X")), true,
-                      "Output(X@Grad) must exist.");
-
+                      platform::errors::InvalidArgument(
+                          "Number of Inputs(Y@Grad) must be larger than 0"));
+    OP_INOUT_CHECK(ctx->HasOutput(framework::GradVarName("X")), "Output", "X",
+                   "UnStackGrad");
     auto input_dims = ctx->GetInputsDim(framework::GradVarName("Y"));
     for (size_t i = 1; i < input_dims.size(); ++i) {
       PADDLE_ENFORCE_EQ(input_dims[i], input_dims[0],
-                        "Dims of all Inputs(Y@Grad) must be the same");
+                        platform::errors::InvalidArgument(
+                            "Dims of all Inputs(Y@Grad) must be the same"));
     }
 
     int axis = ctx->Attrs().Get<int>("axis");
     int rank = input_dims[0].size();
-    PADDLE_ENFORCE_GE(
-        axis, -(rank + 1),
-        "Attr(axis) must be inside [-(rank+1), rank+1), where rank = %d", rank);
-    PADDLE_ENFORCE_LT(
-        axis, rank + 1,
-        "Attr(axis) must be inside [-(rank+1), rank+1), where rank = %d", rank);
+    PADDLE_ENFORCE_GE(axis, -(rank + 1),
+                      platform::errors::InvalidArgument(
+                          "The attribute axis is out of range, it must be "
+                          "inside [-(rank+1), rank+1), where rank = %d",
+                          rank));
+    PADDLE_ENFORCE_LT(axis, rank + 1,
+                      platform::errors::InvalidArgument(
+                          "The attribute axis is out of range, it must be "
+                          "inside [-(rank+1), rank+1), where rank = %d",
+                          rank));
     if (axis < 0) axis += (rank + 1);
 
     auto vec = framework::vectorize(input_dims[0]);
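Throughout the patch the conversion follows one pattern: a bare `PADDLE_ENFORCE(cond, msg, ...)` becomes a comparison macro (`PADDLE_ENFORCE_EQ`/`_GT`/`_GE`/`_LT`) whose message is wrapped in a typed error builder such as `platform::errors::InvalidArgument`, so a failure carries both a proper error class and a formatted message. The sketch below is a minimal standalone imitation of that pattern for readers without a Paddle checkout; `ENFORCE_EQ` and `InvalidArgument` here are simplified stand-ins written for this note, not Paddle's actual implementations.

```cpp
// Minimal standalone imitation of the PADDLE_ENFORCE_EQ +
// platform::errors::InvalidArgument pattern used in this diff.
// ENFORCE_EQ and InvalidArgument are simplified stand-ins, NOT
// Paddle's real macros.
#include <cstdio>
#include <stdexcept>
#include <string>

// Builds a typed, printf-style error message (stand-in for
// platform::errors::InvalidArgument).
struct InvalidArgument {
  std::string msg;
  template <typename... Args>
  explicit InvalidArgument(const char* fmt, Args... args) {
    char buf[512];
    std::snprintf(buf, sizeof(buf), fmt, args...);
    msg = buf;
  }
};

// Compares the two values and throws with file/line context on
// mismatch (stand-in for PADDLE_ENFORCE_EQ).
#define ENFORCE_EQ(a, b, error)                                     \
  do {                                                              \
    if ((a) != (b)) {                                               \
      throw std::invalid_argument(std::string(__FILE__) + ":" +     \
                                  std::to_string(__LINE__) + ": " + \
                                  (error).msg);                     \
    }                                                               \
  } while (0)

int main() {
  int num = 3;       // attribute num
  int axis_len = 4;  // length of the unstacked axis
  try {
    // Mirrors the UnStackOp check: num must match the axis length.
    ENFORCE_EQ(num, axis_len,
               InvalidArgument("The attribute num (%d) must equal the "
                               "length of the unstacked axis (%d).",
                               num, axis_len));
  } catch (const std::invalid_argument& e) {
    std::printf("caught: %s\n", e.what());
  }
  return 0;
}
```

Compared with the old single-string form, the typed builder lets the framework classify failures (e.g. `InvalidArgument` vs. other error kinds), and Paddle's real comparison macros additionally report both operands of the failed check, which is why the patch prefers `PADDLE_ENFORCE_EQ(x, true, ...)` over `PADDLE_ENFORCE(x, ...)`.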