diff --git a/python/paddle/fluid/tests/unittests/test_cross_entropy_loss.py b/python/paddle/fluid/tests/unittests/test_cross_entropy_loss.py
index 3e8d416de18fcf58b19eff38e290d9e67803e9df..b30a5227a9a93c4970b95e814f8e6368f06644a2 100644
--- a/python/paddle/fluid/tests/unittests/test_cross_entropy_loss.py
+++ b/python/paddle/fluid/tests/unittests/test_cross_entropy_loss.py
@@ -20,6 +20,7 @@ import numpy as np
 import unittest
 from test_softmax_op import stable_softmax
 from test_softmax_with_cross_entropy_op import cross_entropy
+from paddle.fluid import Program, program_guard
 
 
 def stable_softmax(x):
diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py
index 323d6fb0288df45f1da7221f61c412da35f680f0..cf4d5b1ed35afebfe23e1fea1a436c7a633a878c 100755
--- a/python/paddle/nn/functional/loss.py
+++ b/python/paddle/nn/functional/loss.py
@@ -1411,7 +1411,7 @@ def cross_entropy(input,
                 out = core.ops.elementwise_mul(out, weight_gather_reshape)
 
         else:
-            for label_val in label:
+            for label_val in label.flatten():
                 if label_val < 0 or label_val >= input.shape[-1]:
                     raise ValueError(
                         'Expected 0 <= label_value < class_dimension({}), but got label_value {}'.
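
The functional change above replaces iteration over the raw `label` tensor with iteration over `label.flatten()` in the out-of-range validation loop. Below is a minimal standalone sketch of why that matters, using NumPy in place of a Paddle Tensor (which iterates similarly along its first axis) and a hypothetical `num_classes` standing in for `input.shape[-1]`:

```python
import numpy as np

num_classes = 4                       # stands in for input.shape[-1]
label = np.array([[0, 3], [1, 2]])    # multi-dimensional label tensor

# Buggy form: iterating a 2-D array yields whole rows, so `label_val`
# is array([0, 3]) and `label_val < 0` is a boolean array whose truth
# value is ambiguous -- NumPy raises instead of validating anything.
try:
    for label_val in label:
        if label_val < 0 or label_val >= num_classes:
            raise ValueError('out-of-range label')
except ValueError as err:
    print('row-wise iteration fails:', err)

# Fixed form: flatten() yields one scalar per element, so the range
# check is well defined for labels of any rank.
for label_val in label.flatten():
    if label_val < 0 or label_val >= num_classes:
        raise ValueError(
            'Expected 0 <= label_value < class_dimension({}), but got '
            'label_value {}'.format(num_classes, label_val))
print('all label values are in range')
```

The new `from paddle.fluid import Program, program_guard` import in the test file suggests the accompanying unit test also exercises this validation under a static-graph `program_guard` scope.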