diff --git a/paddle/fluid/operators/cross_entropy_op.cc b/paddle/fluid/operators/cross_entropy_op.cc
index 66986e14d8f4a25c57c2c90c422171564874239f..8a80619f6636f9f0cab1d0b6332ca05742b9e7f8 100644
--- a/paddle/fluid/operators/cross_entropy_op.cc
+++ b/paddle/fluid/operators/cross_entropy_op.cc
@@ -136,8 +136,8 @@ class CrossEntropyGradientOpBase : public framework::OperatorWithKernel {
                       "Input(Y@Grad) and Input(Y) should have the same rank.");

     bool check = true;
-    if ((!ctx->IsRuntime()) && (framework::product(x_dims) <= 0 ||
-                                framework::product(label_dims) <= 0)) {
+    if ((!ctx->IsRuntime()) &&
+        (framework::product(x_dims) <= 0 || framework::product(dy_dims) <= 0)) {
       check = false;
     }

diff --git a/python/paddle/fluid/backward.py b/python/paddle/fluid/backward.py
index 1306fbc574e28bfb5619b1504c197e1d90778ddb..860e7092f6958c477668695bf28aec03d876aa9f 100644
--- a/python/paddle/fluid/backward.py
+++ b/python/paddle/fluid/backward.py
@@ -1251,7 +1251,7 @@ def calc_gradient(targets, inputs, target_gradients=None, no_grad_set=None):
             op_desc = _create_op_desc_("fill_constant",
                                        {"ShapeTensor": [target_shape.name]},
                                        {"Out": [grad_name]}, {
-                                           "shape": [],
+                                           "shape": target.shape,
                                            "value": 1.0,
                                            "dtype": target.dtype,
                                        })
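
Note (not part of the patch): the fill_constant op that calc_gradient emits here seeds each target's gradient with ones. Below is a minimal NumPy sketch of what that op produces; the helper name is hypothetical and this is not the Paddle API. Setting the compile-time shape attribute to target.shape (instead of []) presumably lets static shape inference, such as the dy_dims check in the C++ hunk above, see real dimensions rather than an empty shape before ShapeTensor is evaluated at runtime.

```python
# Rough stand-in for the seeded gradient, assuming NumPy only (not the Paddle API).
import numpy as np

def seed_initial_gradient(target):
    # Hypothetical analogue of fill_constant(shape=target.shape, value=1.0):
    # the initial dL/dY is a tensor of ones with the target's shape and dtype.
    return np.full(target.shape, 1.0, dtype=target.dtype)

y = np.zeros((4, 1), dtype="float32")   # stand-in for a loss-like target tensor
dy = seed_initial_gradient(y)
assert dy.shape == y.shape
assert (dy == 1.0).all()
```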