From a1373714f76bf85d79baad02d29f6f27cb9b7a8e Mon Sep 17 00:00:00 2001
From: WangXi
Date: Wed, 18 Aug 2021 11:16:14 +0800
Subject: [PATCH] NPU use squared_l2_norm in GradientClipByGlobalNorm (#34836)

---
 python/paddle/fluid/clip.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/paddle/fluid/clip.py b/python/paddle/fluid/clip.py
index 04fb45cd3ae..d48cea48a76 100644
--- a/python/paddle/fluid/clip.py
+++ b/python/paddle/fluid/clip.py
@@ -40,7 +40,7 @@ def _squared_l2_norm(x):
     This OP returns the squared L2 norm of a tensor.
     """
 
-    if core.is_compiled_with_npu() or core.is_compiled_with_xpu():
+    if core.is_compiled_with_xpu():
         square = layers.square(x)
         sum_square = layers.reduce_sum(square)
         return sum_square
-- 
GitLab
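
Editor's note (not part of the upstream patch): by dropping the NPU check, this change stops NPU builds from taking the square + reduce_sum fallback, so _squared_l2_norm can use the dedicated squared_l2_norm op instead, as the subject line indicates. Both paths compute the same quantity, sum(x**2) = ||x||_2^2. Below is a minimal NumPy sketch of that equivalence; the function names are illustrative stand-ins, not PaddlePaddle APIs.

import numpy as np

def squared_l2_norm_fused(x):
    # Stand-in for a fused squared_l2_norm op: sum of squares in one call.
    flat = x.ravel()
    return float(np.dot(flat, flat))

def squared_l2_norm_fallback(x):
    # Mirrors the square + reduce_sum fallback the patch keeps for XPU builds.
    return float(np.sum(np.square(x)))

if __name__ == "__main__":
    x = np.random.randn(4, 3).astype(np.float32)
    a, b = squared_l2_norm_fused(x), squared_l2_norm_fallback(x)
    assert np.isclose(a, b, rtol=1e-5), (a, b)
    print("both paths agree:", a)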