Unverified commit e5a62493, authored by chajchaj, committed by GitHub

Enhance error messages of cross_entropy_op and sigmoid_cross_entropy_with_logits_op (#24485)

* error message of cross_entropy_op, test=develop
* fix bug: platform::errors::InvalidArgument can't be used in HOSTDEVICE, test=develop
* fix bug: restore check_variable_and_dtype for rank_loss and bpr_loss, test=develop
Parent aa02e347
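The heart of the change is a move from bare printf-style messages to Paddle's structured error machinery: presence checks become OP_INOUT_CHECK, and shape/value checks wrap their message in platform::errors::InvalidArgument. A minimal sketch of the before/after pattern (the op name and the particular check are illustrative, not a line from the diff below):

    // Before: the check carries only a raw message string.
    PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true,
                      "Input(X) should be not null.");

    // After: presence checks name the operand and the op uniformly ...
    OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "CrossEntropy");

    // ... and shape checks attach an error class plus the observed values.
    PADDLE_ENFORCE_EQ(
        x_dims[rank - 1], label_dims[rank - 1],
        platform::errors::InvalidArgument(
            "The last dimension of Input(X) and Input(Label) should be "
            "equal. But received: [%d] and [%d].",
            x_dims[rank - 1], label_dims[rank - 1]));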
@@ -25,12 +25,9 @@ class CrossEntropyOpBase : public framework::OperatorWithKernel {
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext* ctx) const override {
PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, "Input(X) should be not null.");
PADDLE_ENFORCE_EQ(ctx->HasInput("Label"), true,
"Input(Label) should be not null.");
PADDLE_ENFORCE_EQ(ctx->HasOutput("Y"), true,
"Output(Y) should be not null.");
OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "CrossEntropy");
OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label", "CrossEntropy");
OP_INOUT_CHECK(ctx->HasOutput("Y"), "Output", "Y", "CrossEntropy");
auto x_dims = ctx->GetInputDim("X");
auto label_dims = ctx->GetInputDim("Label");
@@ -44,53 +41,61 @@ class CrossEntropyOpBase : public framework::OperatorWithKernel {
PADDLE_ENFORCE_EQ(
framework::slice_ddim(x_dims, 0, rank - 1),
framework::slice_ddim(label_dims, 0, rank - 1),
"ShapeError: Input(X) and Input(Label) shall have the same shape "
"except the last dimension. But received: the shape of Input(X) is "
"[%s],"
"the shape of Input(Label) is [%s].",
x_dims, label_dims);
platform::errors::InvalidArgument(
"Input(X) and Input(Label) shall have the same shape "
"except the last dimension. But received: the shape of Input(X) "
"is "
"[%s], the shape of Input(Label) is [%s].",
x_dims, label_dims));
}
if (IsSoftLabel(ctx)) {
PADDLE_ENFORCE_EQ(
rank, label_dims.size(),
"ShapeError: If Attr(soft_label) == true, Input(X) and Input(Label) "
"shall have the same dimensions. But received: the dimensions of "
"Input(X) is [%d],"
"the shape of Input(X) is [%s], the dimensions of Input(Label) is "
"[%d], the shape of"
"Input(Label) is [%s]",
rank, x_dims, label_dims.size(), label_dims);
platform::errors::InvalidArgument(
"If Attr(soft_label) == true, Input(X) and Input(Label) "
"shall have the same dimensions. But received: the dimensions "
"of Input(X) is [%d], the shape of Input(X) is [%s], "
"the dimensions of Input(Label) is [%d], the shape of "
"Input(Label) is [%s].",
rank, x_dims, label_dims.size(), label_dims));
if (check) {
PADDLE_ENFORCE_EQ(
x_dims[rank - 1], label_dims[rank - 1],
"ShapeError: If Attr(soft_label) == true, the last dimension of "
"Input(X) and Input(Label) should be equal. But received: the"
"last dimension of Input(X) is [%d], the shape of Input(X) is [%s],"
"the last dimension of Input(Label) is [%d], the shape of "
"Input(Label)"
"is [%s], the last dimension is [%d].",
x_dims[rank - 1], x_dims, label_dims[rank - 1], label_dims,
rank - 1);
platform::errors::InvalidArgument(
"If Attr(soft_label) == true, the last dimension of "
"Input(X) and Input(Label) should be equal. But received: "
"the last dimension of Input(X) is [%d], the shape of "
"Input(X) is [%s], the last dimension of Input(Label) is "
"[%d], the shape of Input(Label) is [%s], the last "
"dimension is [%d].",
x_dims[rank - 1], x_dims, label_dims[rank - 1], label_dims,
rank - 1));
}
} else {
if (rank == label_dims.size()) {
PADDLE_ENFORCE_EQ(
label_dims[rank - 1], 1UL,
"ShapeError: the last dimension of Input(Label) should be 1."
"But received: the last dimension of Input(Label) is [%d],"
"the last dimension is [%d]",
label_dims[rank - 1], rank - 1);
platform::errors::InvalidArgument(
"The last dimension of Input(Label) should be 1. "
"But received: the last dimension of Input(Label) is [%d], "
"the last dimension is [%d].",
label_dims[rank - 1], rank - 1));
} else {
PADDLE_ENFORCE_EQ(rank, label_dims.size() + 1,
"ShapeError: The rank of Input(X) should be equal to "
"Input(Label) plus 1."
"But received: The dimension of Input(X) is [%d], "
"the shape of Input(X) is [%s],"
"the dimension of Input(Label) is [%d], the shape of "
"Input(Label) is [%s]",
rank, x_dims, label_dims.size(), label_dims);
PADDLE_ENFORCE_EQ(
rank, label_dims.size() + 1,
platform::errors::InvalidArgument(
"The rank of Input(X) should be equal to the rank of "
"Input(Label) plus 1. But received: the dimension of "
"Input(X) is [%d], the shape of Input(X) is [%s], "
"the dimension of Input(Label) is [%d], the shape of "
"Input(Label) is [%s].",
rank, x_dims, label_dims.size(), label_dims));
}
}
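As a worked example of the hard-label branch above: an Input(X) of shape [64, 10] passes with an Input(Label) of shape [64, 1] (same rank, last dimension 1) or of shape [64] (rank smaller by one); any other combination now raises InvalidArgument with both shapes in the message.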
@@ -122,19 +127,23 @@ class CrossEntropyGradientOpBase : public framework::OperatorWithKernel {
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext* ctx) const {
PADDLE_ENFORCE_EQ(ctx->HasInput("Label"), true,
"Input(Label) should be not null.");
PADDLE_ENFORCE_EQ(ctx->HasInput(framework::GradVarName("Y")), true,
"Input(Y@GRAD) shoudl be not null.");
PADDLE_ENFORCE_EQ(ctx->HasOutput(framework::GradVarName("X")), true,
"Output(X@GRAD) should be not null.");
OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label",
"CrossEntropyGradientOpBase");
OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Y")), "Input",
framework::GradVarName("Y"), "CrossEntropyGradientOpBase");
OP_INOUT_CHECK(ctx->HasOutput(framework::GradVarName("X")), "Output",
framework::GradVarName("X"), "CrossEntropyGradientOpBase");
auto x_dims = GetXDim(ctx);
auto label_dims = ctx->GetInputDim("Label");
auto dy_dims = ctx->GetInputDim(framework::GradVarName("Y"));
int rank = x_dims.size();
PADDLE_ENFORCE_EQ(dy_dims.size(), label_dims.size(),
"Input(Y@Grad) and Input(Y) should have the same rank.");
PADDLE_ENFORCE_EQ(
dy_dims.size(), label_dims.size(),
platform::errors::InvalidArgument(
"Input(Y@Grad) and Input(Y) should have the same rank."
"But received: Y@Grad's rank is [%d], Y's rank is [%d]",
dy_dims.size(), label_dims.size()));
bool check = true;
if ((!ctx->IsRuntime()) &&
@@ -143,10 +152,15 @@ class CrossEntropyGradientOpBase : public framework::OperatorWithKernel {
}
if (check) {
PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
framework::slice_ddim(dy_dims, 0, rank - 1),
"The Input(X) and Input(Y@Grad) should have the same "
"shape except the last dimension.");
PADDLE_ENFORCE_EQ(
framework::slice_ddim(x_dims, 0, rank - 1),
framework::slice_ddim(dy_dims, 0, rank - 1),
platform::errors::InvalidArgument(
"The Input(X) and Input(Y@Grad) should have the same "
"shape except the last dimension. but received: "
"the shape of Input(X) is [%s], "
"the shape of Input(Y@Grad) is [%s].",
x_dims, dy_dims));
}
ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
@@ -253,7 +267,7 @@ class CrossEntropyGradientOp : public CrossEntropyGradientOpBase {
using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;
void InferShape(framework::InferShapeContext* ctx) const override {
PADDLE_ENFORCE_EQ(ctx->HasInput("X"), true, "Input(X) should be not null.");
OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X", "CrossEntropyGradientOp");
CrossEntropyGradientOpBase::InferShape(ctx);
}
};
@@ -281,11 +295,10 @@ class CrossEntropyOp2 : public CrossEntropyOpBase {
void InferShape(framework::InferShapeContext* ctx) const override {
CrossEntropyOpBase::InferShape(ctx);
PADDLE_ENFORCE_EQ(ctx->HasOutput("XShape"), true,
"Output(XShape) should be not null.");
PADDLE_ENFORCE_EQ(ctx->HasOutput("MatchX"), true,
"Output(MatchX) should be not null.");
OP_INOUT_CHECK(ctx->HasOutput("XShape"), "Output", "XShape",
"CrossEntropyOp2");
OP_INOUT_CHECK(ctx->HasOutput("MatchX"), "Output", "MatchX",
"CrossEntropyOp2");
auto x_dims = ctx->GetInputDim("X");
auto x_dims_vec = framework::vectorize(x_dims);
x_dims_vec.push_back(0);
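(The trailing 0 appended here marks XShape as a shape-carrying tensor: presumably, as with Paddle's reshape2-style ops, the gradient op can read Input(X)'s original shape back from XShape without keeping X's data alive.)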
@@ -305,8 +318,8 @@ class CrossEntropyGradientOp2 : public CrossEntropyGradientOpBase {
public:
using CrossEntropyGradientOpBase::CrossEntropyGradientOpBase;
void InferShape(framework::InferShapeContext* ctx) const override {
PADDLE_ENFORCE_EQ(ctx->HasInput("MatchX"), true,
"Input(MatchX) must exist");
OP_INOUT_CHECK(ctx->HasInput("MatchX"), "Input", "MatchX",
"CrossEntropyGradientOp2");
CrossEntropyGradientOpBase::InferShape(ctx);
}
@@ -166,11 +166,14 @@ struct HardLabelCrossEntropyForwardFunctor {
HOSTDEVICE void operator()(int64_t idx) const {
auto label = label_[idx];
if (label != ignore_index_) {
// don't update to PADDLE_ENFORCE_GE and PADDLE_ENFORCE_LT because
// platform::errors::InvalidArgument can't be used in HOSTDEVICE code
PADDLE_ENFORCE(label >= 0 && label < feature_size_,
"Variable value (label) of "
"OP(fluid.layers.cross_entropy) expected >= 0 "
"and < %ld, but got %ld. Please check label value.",
feature_size_, label);
auto match_x = x_[idx * feature_size_ + label];
y_[idx] = -math::TolerableValue<T>()(real_log(match_x));
match_x_[idx] = match_x;
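The comment above is why the second commit partially reverted the first: platform::errors::InvalidArgument formats its message into a host-side error object, which code compiled for the device cannot construct, so HOSTDEVICE functors keep the plain printf-style PADDLE_ENFORCE. A minimal sketch of the constraint (the functor is a hypothetical stand-in, not a kernel from this diff):

    template <typename T>
    struct NonNegativeCheckFunctor {
      HOSTDEVICE void operator()(int64_t idx) const {
        // Fine in device code: printf-style PADDLE_ENFORCE, as used above.
        PADDLE_ENFORCE(static_cast<int64_t>(data_[idx]) >= 0,
                       "expected value >= 0, but got %ld",
                       static_cast<int64_t>(data_[idx]));
        // Would not compile here: platform::errors::InvalidArgument(...)
        // builds a host-side error object.
      }
      const T* data_;
    };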
@@ -28,16 +28,24 @@ class SigmoidCrossEntropyWithLogitsOp : public framework::OperatorWithKernel {
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext* ctx) const override {
PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");
PADDLE_ENFORCE(ctx->HasOutput("Out"), "Output(Out) should be not null.");
OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X",
"SigmoidCrossEntropyWithLogitsOp");
OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label",
"SigmoidCrossEntropyWithLogitsOp");
OP_INOUT_CHECK(ctx->HasOutput("Out"), "Output", "Out",
"SigmoidCrossEntropyWithLogitsOp");
auto x_dims = ctx->GetInputDim("X");
auto labels_dims = ctx->GetInputDim("Label");
int rank = x_dims.size();
PADDLE_ENFORCE_EQ(rank, labels_dims.size(),
"Input(X) and Input(Label) shall have the same rank.");
platform::errors::InvalidArgument(
"Input(X) and Input(Label) shall have the same rank."
"But received: the rank of Input(X) is [%d], "
"the rank of Input(Label) is [%d].",
rank, labels_dims.size()));
bool check = true;
if ((!ctx->IsRuntime()) && (framework::product(x_dims) <= 0 ||
framework::product(labels_dims) <= 0)) {
@@ -45,10 +53,14 @@ class SigmoidCrossEntropyWithLogitsOp : public framework::OperatorWithKernel {
}
if (check) {
PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank),
framework::slice_ddim(labels_dims, 0, rank),
"Input(X) and Input(Label) shall have the same shape "
"except the last dimension.");
PADDLE_ENFORCE_EQ(
framework::slice_ddim(x_dims, 0, rank),
framework::slice_ddim(labels_dims, 0, rank),
platform::errors::InvalidArgument(
"Input(X) and Input(Label) shall have the same shape "
"except the last dimension. But received: the shape of "
"Input(X) is [%s], the shape of Input(Label) is [%s].",
x_dims, labels_dims));
}
ctx->ShareDim("X", /*->*/ "Out");
@@ -62,12 +74,16 @@ class SigmoidCrossEntropyWithLogitsGradOp
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext* ctx) const override {
PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");
PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
"Input(Out@GRAD) shoudl be not null.");
PADDLE_ENFORCE(ctx->HasOutput(framework::GradVarName("X")),
"Output(X@GRAD) should be not null.");
OP_INOUT_CHECK(ctx->HasInput("X"), "Input", "X",
"SigmoidCrossEntropyWithLogitsGradOp");
OP_INOUT_CHECK(ctx->HasInput("Label"), "Input", "Label",
"SigmoidCrossEntropyWithLogitsGradOp");
OP_INOUT_CHECK(ctx->HasInput(framework::GradVarName("Out")), "Input",
framework::GradVarName("Out"),
"SigmoidCrossEntropyWithLogitsGradOp");
OP_INOUT_CHECK(ctx->HasOutput(framework::GradVarName("X")), "Output",
framework::GradVarName("X"),
"SigmoidCrossEntropyWithLogitsGradOp");
auto x_dims = ctx->GetInputDim("X");
auto labels_dims = ctx->GetInputDim("Label");
@@ -81,14 +97,23 @@ class SigmoidCrossEntropyWithLogitsGradOp
}
if (check) {
PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank),
framework::slice_ddim(labels_dims, 0, rank),
"Input(X) and Input(Label) shall have the same shape.");
PADDLE_ENFORCE_EQ(
framework::slice_ddim(x_dims, 0, rank),
framework::slice_ddim(labels_dims, 0, rank),
platform::errors::InvalidArgument(
"Input(X) and Input(Label) shall have the same shape "
"except the last dimension. But received: the shape of "
"Input(X) is [%s], the shape of Input(Label) is [%s].",
x_dims, labels_dims));
PADDLE_ENFORCE_EQ(
framework::slice_ddim(x_dims, 0, rank),
framework::slice_ddim(dout_dims, 0, rank),
"Input(X) and Input(Out@Grad) shall have the same shape.");
platform::errors::InvalidArgument(
"Input(X) and Input(Out@Grad) shall have the same shape "
"except the last dimension. But received: the shape of "
"Input(X) is [%s], the shape of Input(Out@Grad) is [%s].",
x_dims, dout_dims));
}
ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
@@ -1410,9 +1410,14 @@ def sigmoid_cross_entropy_with_logits(x,
${comment}
Args:
x(${x_type}): ${x_comment}
label(${label_type}): ${label_comment}
ignore_index(int): ${ignore_index_comment}
x(Variable): a 2-D tensor with shape N x D, where N is the batch size and
    D is the number of classes. This input is a tensor of logits computed
    by the previous operator. Logits are unscaled log probabilities given
    as log(p/(1-p)). The data type should be float32 or float64.
label(Variable): a 2-D tensor of the same type and shape as X.
    This input is a tensor of probabilistic labels for each logit.
ignore_index(int): Specifies a target value that is ignored and
    does not contribute to the input gradient.
name(str|None): The default value is None. Normally there is
no need for user to set this property. For more information,
please refer to :ref:`api_guide_Name`
@@ -1437,6 +1442,8 @@ def sigmoid_cross_entropy_with_logits(x,
normalize=True) # or False
# loss = fluid.layers.reduce_sum(loss) # summation of loss
"""
check_variable_and_dtype(x, 'input', ['float16', 'float32', 'float64'],
'sigmoid_cross_entropy_with_logits')
helper = LayerHelper("sigmoid_cross_entropy_with_logits", **locals())
@@ -20,6 +20,8 @@ from scipy.special import logit
from scipy.special import expit
import paddle.fluid.core as core
import unittest
from paddle.fluid import compiler, Program, program_guard
import paddle.fluid as fluid
class TestSigmoidCrossEntropyWithLogitsOp1(OpTest):
@@ -242,5 +244,31 @@ class TestSigmoidCrossEntropyWithLogitsOp6(OpTest):
self.check_grad(['X'], 'Out')
class TestSigmoidCrossEntropyWithLogitsOpError(unittest.TestCase):
def test_errors(self):
with program_guard(Program(), Program()):
def test_Variable():
# the input of sigmoid_cross_entropy_with_logits must be Variable.
x1 = fluid.create_lod_tensor(
np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace())
lab1 = fluid.create_lod_tensor(
np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace())
fluid.layers.sigmoid_cross_entropy_with_logits(x1, lab1)
self.assertRaises(TypeError, test_Variable)
def test_dtype():
# the input dtype of sigmoid_cross_entropy_with_logits must be float16, float32, or float64
# float16 can only be used on GPU places
x2 = fluid.layers.data(
name='x2', shape=[3, 4, 5, 6], dtype="int32")
lab2 = fluid.layers.data(
name='lab2', shape=[3, 4, 5, 6], dtype="int32")
fluid.layers.sigmoid_cross_entropy_with_logits(x2, lab2)
self.assertRaises(TypeError, test_dtype)
if __name__ == '__main__':
unittest.main()