diff --git a/paddle/fluid/operators/roi_align_op.cc b/paddle/fluid/operators/roi_align_op.cc
index a019cbd0b190eeea0c6fe48f0842d02d8cb765b3..b8fa1caadae545f851033dd41c6b0a70841c6321 100644
--- a/paddle/fluid/operators/roi_align_op.cc
+++ b/paddle/fluid/operators/roi_align_op.cc
@@ -23,35 +23,59 @@ class ROIAlignOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput("X"),
-                   "Input(X) of ROIAlignOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasInput("ROIs"),
-                   "Input(ROIs) of ROIAlignOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasOutput("Out"),
-                   "Output(Out) of ROIAlignOp should not be null.");
+    PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true,
+                      platform::errors::NotFound("Input(X) of ROIAlignOp "
+                                                 "is not found."));
+    PADDLE_ENFORCE_EQ(ctx->HasInput("ROIs"), true,
+                      platform::errors::NotFound("Input(ROIs) of ROIAlignOp "
+                                                 "is not found."));
+    PADDLE_ENFORCE_EQ(ctx->HasOutput("Out"), true,
+                      platform::errors::NotFound("Output(Out) of ROIAlignOp "
+                                                 "is not found."));
     auto input_dims = ctx->GetInputDim("X");
     auto rois_dims = ctx->GetInputDim("ROIs");
 
-    PADDLE_ENFORCE(input_dims.size() == 4,
-                   "The format of input tensor is NCHW.");
-    PADDLE_ENFORCE(rois_dims.size() == 2,
-                   "ROIs should be a 2-D LoDTensor of shape (num_rois, 4)"
-                   "given as [[x1, y1, x2, y2], ...].");
+    PADDLE_ENFORCE_EQ(
+        input_dims.size(), 4,
+        platform::errors::InvalidArgument(
+            "The format of Input(X) in"
+            "RoIAlignOp is NCHW. And the rank of input must be 4. "
+            "But received rank = %d",
+            input_dims.size()));
+    PADDLE_ENFORCE_EQ(rois_dims.size(), 2, platform::errors::InvalidArgument(
+                                               "The rank of Input(ROIs) "
+                                               "in RoIAlignOp should be 2. "
+                                               "But the rank of RoIs is %d",
+                                               rois_dims.size()));
     if (ctx->IsRuntime()) {
-      PADDLE_ENFORCE(rois_dims[1] == 4,
-                     "ROIs should be a 2-D LoDTensor of shape (num_rois, 4)"
-                     "given as [[x1, y1, x2, y2], ...].");
+      PADDLE_ENFORCE_EQ(rois_dims[1], 4,
+                        platform::errors::InvalidArgument(
+                            "The second dimension "
+                            "of Input(ROIs) should be 4. But received the "
+                            "dimension = %d",
+                            rois_dims[1]));
     }
     int pooled_height = ctx->Attrs().Get<int>("pooled_height");
     int pooled_width = ctx->Attrs().Get<int>("pooled_width");
     float spatial_scale = ctx->Attrs().Get<float>("spatial_scale");
 
     PADDLE_ENFORCE_GT(pooled_height, 0,
-                      "The pooled output height must greater than 0");
+                      platform::errors::InvalidArgument(
+                          "The pooled output "
+                          "height must greater than 0. But received "
+                          "pooled_height = %d",
+                          pooled_height));
     PADDLE_ENFORCE_GT(pooled_width, 0,
-                      "The pooled output width must greater than 0");
+                      platform::errors::InvalidArgument(
+                          "The pooled output "
+                          "width must greater than 0. But received "
+                          "pooled_width = %d",
+                          pooled_width));
     PADDLE_ENFORCE_GT(spatial_scale, 0.0f,
-                      "The spatial scale must greater than 0");
+                      platform::errors::InvalidArgument(
+                          "The spatial scale "
+                          "must greater than 0 But received spatial_scale = %f",
+                          spatial_scale));
 
     auto out_dims = input_dims;
     out_dims[0] = rois_dims[0];
@@ -76,10 +100,13 @@ class ROIAlignGradOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;
 
   void InferShape(framework::InferShapeContext* ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
-                   "The GRAD@Out of ROIAlignGradOp should not be null.");
-    PADDLE_ENFORCE(ctx->HasOutputs(framework::GradVarName("X")),
-                   "The GRAD@X of ROIAlignGradOp should not be null.");
+    PADDLE_ENFORCE_EQ(
+        ctx->HasInput(framework::GradVarName("Out")), true,
+        platform::errors::NotFound("The GRAD@Out of ROIAlignGradOp "
+                                   "is not found."));
+    PADDLE_ENFORCE_EQ(ctx->HasOutputs(framework::GradVarName("X")), true,
+                      platform::errors::NotFound("The GRAD@X of ROIAlignGradOp "
+                                                 "is not found."));
     ctx->SetOutputsDim(framework::GradVarName("X"), ctx->GetInputsDim("X"));
   }
 
diff --git a/paddle/fluid/operators/roi_align_op.cu b/paddle/fluid/operators/roi_align_op.cu
index 41e6040496c2dbbad0bbf54f5372cf417e4d293d..6ebae590177fa1c44a7d0a5bbeb9edc9dc4f4f96 100644
--- a/paddle/fluid/operators/roi_align_op.cu
+++ b/paddle/fluid/operators/roi_align_op.cu
@@ -266,7 +266,11 @@ class GPUROIAlignOpKernel : public framework::OpKernel<T> {
     int rois_batch_size = rois_lod.size() - 1;
     PADDLE_ENFORCE_EQ(
         rois_batch_size, batch_size,
-        "The rois_batch_size and imgs batch_size must be the same.");
+        platform::errors::InvalidArgument(
+            "The rois_batch_size and imgs "
+            "batch_size must be the same. But received rois_batch_size = %d, "
+            "batch_size = %d",
+            rois_batch_size, batch_size));
     int rois_num_with_lod = rois_lod[rois_batch_size];
     PADDLE_ENFORCE_EQ(rois_num, rois_num_with_lod,
                       "The rois_num from input and lod must be the same.");
diff --git a/paddle/fluid/operators/roi_align_op.h b/paddle/fluid/operators/roi_align_op.h
index 8c6b7cfe5d106b40cbaf1a662d185103c11daa6b..e40bc1d031266c53f31efe66fcc573d5fbdb674b 100644
--- a/paddle/fluid/operators/roi_align_op.h
+++ b/paddle/fluid/operators/roi_align_op.h
@@ -172,7 +172,11 @@ class CPUROIAlignOpKernel : public framework::OpKernel<T> {
     int rois_batch_size = rois_lod.size() - 1;
     PADDLE_ENFORCE_EQ(
         rois_batch_size, batch_size,
-        "The rois_batch_size and imgs batch_size must be the same.");
+        platform::errors::InvalidArgument(
+            "The rois_batch_size and imgs "
+            "batch_size must be the same. But received rois_batch_size = %d, "
+            "batch_size = %d",
+            rois_batch_size, batch_size));
     int rois_num_with_lod = rois_lod[rois_batch_size];
     PADDLE_ENFORCE_EQ(rois_num, rois_num_with_lod,
                       "The rois_num from input and lod must be the same.");
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 0da5b0612700bf01db9728f2fe3c05ab0c0842c7..1f1fcd5f46dafdabc801f2ee290d5e380acb109d 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -6723,6 +6723,9 @@ def roi_align(input,
                                                spatial_scale=0.5,
                                                sampling_ratio=-1)
     """
+    check_variable_and_dtype(input, 'input', ['float32', 'float64'],
+                             'roi_align')
+    check_variable_and_dtype(rois, 'rois', ['float32', 'float64'], 'roi_align')
     helper = LayerHelper('roi_align', **locals())
     dtype = helper.input_dtype()
     align_out = helper.create_variable_for_type_inference(dtype)
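
Note: a minimal sketch of how the new Python-side check surfaces to users. The tensor names, shapes, and pooled sizes below are illustrative only (not taken from this patch); the point is that an unsupported dtype now fails fast in check_variable_and_dtype at graph-construction time instead of reaching the C++ kernel.

    import paddle.fluid as fluid

    # Hypothetical graph: an int32 feature map fed into roi_align.
    # Names, shapes and attribute values are made up for illustration.
    x = fluid.data(name='data', shape=[None, 256, 32, 32], dtype='int32')
    rois = fluid.data(name='rois', shape=[None, 4], dtype='float32', lod_level=1)

    try:
        out = fluid.layers.roi_align(input=x,
                                     rois=rois,
                                     pooled_height=7,
                                     pooled_width=7,
                                     spatial_scale=0.5,
                                     sampling_ratio=-1)
    except TypeError as e:
        # With this patch, check_variable_and_dtype rejects the int32 input
        # because roi_align only accepts float32/float64.
        print(e)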