diff --git a/python/paddle/fluid/tests/unittests/test_cross_entropy_loss.py b/python/paddle/fluid/tests/unittests/test_cross_entropy_loss.py
index cd44d584bbb02b95cc9526652454f86ac5500f4b..b8086eaf4a1ea32ce126fc262f46b1675680034b 100644
--- a/python/paddle/fluid/tests/unittests/test_cross_entropy_loss.py
+++ b/python/paddle/fluid/tests/unittests/test_cross_entropy_loss.py
@@ -219,6 +219,47 @@ class CrossEntropyLoss(unittest.TestCase):
         self.assertTrue(np.allclose(static_ret, expected))
         self.assertTrue(np.allclose(dy_ret_value, expected))
 
+    def test_cross_entropy_loss_1d_with_weight_none_func(self):
+        input_np = np.random.random([100, 200]).astype(np.float64)  #N,C
+        label_np = np.random.randint(0, 100, size=(100)).astype(np.int64)  #N
+        weight_np = np.random.random([200]).astype(np.float64)  #C
+        paddle.enable_static()
+        prog = fluid.Program()
+        startup_prog = fluid.Program()
+        place = fluid.CUDAPlace(0) if fluid.core.is_compiled_with_cuda(
+        ) else fluid.CPUPlace()
+        with fluid.program_guard(prog, startup_prog):
+            input = fluid.data(name='input', shape=[100, 200], dtype='float64')
+            label = fluid.data(name='label', shape=[100], dtype='int64')
+            weight = fluid.data(name='weight', shape=[200], dtype='float64')
+            ret = paddle.nn.functional.cross_entropy(
+                input, label, weight=weight, reduction='none')
+
+            exe = fluid.Executor(place)
+            static_ret = exe.run(prog,
+                                 feed={
+                                     'input': input_np,
+                                     'label': label_np,
+                                     "weight": weight_np
+                                 },
+                                 fetch_list=[ret])
+            static_ret = np.squeeze(static_ret)
+            self.assertIsNotNone(static_ret)
+        with fluid.dygraph.guard():
+            dy_ret = paddle.nn.functional.cross_entropy(
+                fluid.dygraph.to_variable(input_np),
+                fluid.dygraph.to_variable(label_np),
+                weight=fluid.dygraph.to_variable(weight_np),
+                reduction='none')
+            dy_ret_value = dy_ret.numpy()
+            dy_ret_value = np.squeeze(dy_ret_value)
+            self.assertIsNotNone(dy_ret_value)
+        expected = cross_entropy_loss_1d(
+            input_np, label_np, weight=weight_np, reduction='none')
+        self.assertTrue(np.allclose(static_ret, dy_ret_value))
+        self.assertTrue(np.allclose(static_ret, expected))
+        self.assertTrue(np.allclose(dy_ret_value, expected))
+
     def test_cross_entropy_loss_1d_mean(self):
         input_np = np.random.random([100, 200]).astype(np.float64)  #N,C
         label_np = np.random.randint(0, 100, size=(100)).astype(np.int64)  #N,1
diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py
index df83b174b8aba9caa654d01861388282cc1a2c14..d89529db0af6ea50972309850a58b48ed0c57cd8 100755
--- a/python/paddle/nn/functional/loss.py
+++ b/python/paddle/nn/functional/loss.py
@@ -1236,6 +1236,8 @@ def cross_entropy(input,
             else:
                 return core.ops.mean(out)
         else:
+            if input_dims - 1 == label_dims:
+                out = paddle.squeeze(out, axis=axis)
             return out
 
     fluid.data_feeder.check_variable_and_dtype(
@@ -1267,6 +1269,9 @@ def cross_entropy(input,
             else:
                 return paddle.mean(out, name=name)
         else:
+            if input_dims - 1 == label_dims:
+                out = paddle.squeeze(out, axis=axis)
+
             return out
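
For context, the `loss.py` change above makes `paddle.nn.functional.cross_entropy` squeeze the trailing class axis when `reduction='none'` is combined with hard (integer) labels, so the unreduced loss comes back with the same shape as the label instead of carrying an extra size-1 dimension. A minimal sketch of that shape contract, assuming a Paddle 2.0-style dygraph API (`paddle.disable_static` and `paddle.to_tensor` in place of the `fluid.dygraph.guard`/`to_variable` calls used in the test, and smaller shapes than the test's N=100, C=200):

    import numpy as np
    import paddle

    paddle.disable_static()  # run eagerly in dygraph mode

    input_np = np.random.random([8, 5]).astype(np.float64)  # N, C
    label_np = np.random.randint(0, 5, size=(8, )).astype(np.int64)  # N

    loss = paddle.nn.functional.cross_entropy(
        paddle.to_tensor(input_np),
        paddle.to_tensor(label_np),
        reduction='none')

    # With this patch applied, the unreduced loss matches the label shape
    # ([8]) rather than keeping a trailing singleton axis ([8, 1]).
    assert list(loss.shape) == [8]

This is also why the new test squeezes both `static_ret` and `dy_ret_value` with `np.squeeze` before comparing against the reference `cross_entropy_loss_1d`: both execution modes should agree with the expected per-sample losses once the output carries no spurious axis.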