From e74f287bb32fe899587b0881785ce29fe8d3ee1d Mon Sep 17 00:00:00 2001
From: Guoxia Wang
Date: Tue, 7 Jun 2022 11:25:33 +0800
Subject: [PATCH] fix the unittest bug of none grad of margin_cross_entropy
 when FLAGS_retain_grad_for_all_tensor change default setting (#43241)

---
 .../fluid/tests/unittests/parallel_margin_cross_entropy.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/python/paddle/fluid/tests/unittests/parallel_margin_cross_entropy.py b/python/paddle/fluid/tests/unittests/parallel_margin_cross_entropy.py
index b77a04d8ee..26e9e05b82 100644
--- a/python/paddle/fluid/tests/unittests/parallel_margin_cross_entropy.py
+++ b/python/paddle/fluid/tests/unittests/parallel_margin_cross_entropy.py
@@ -39,6 +39,7 @@ class TestParallelMarginSoftmaxCrossEntropyOp(unittest.TestCase):
     def setUp(self):
         strategy = fleet.DistributedStrategy()
         fleet.init(is_collective=True, strategy=strategy)
+        paddle.fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
 
     def test_parallel_margin_softmax_cross_entropy(self):
         margin1s = [1.0, 1.0, 1.35]
--
GitLab
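
Note (not part of the patch): the added set_flags call matters because, once
FLAGS_retain_grad_for_all_tensor defaults to False, gradients of intermediate
(non-leaf) tensors are no longer kept after backward(), so a test that reads
them gets None. Below is a minimal standalone sketch of that behavior; the
tensor shapes and variable names are illustrative only, not taken from the
test itself.

    import paddle

    # Assumption: restore the old behavior of retaining grads for all tensors.
    paddle.fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})

    x = paddle.randn([4, 8])
    x.stop_gradient = False
    y = paddle.nn.functional.softmax(x)  # intermediate (non-leaf) tensor
    loss = y.sum()
    loss.backward()

    # With the flag enabled, y.grad is populated; with the new default it
    # may be None unless the test retains it explicitly.
    print(y.grad is not None)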