From 9ab3c76b4b4c01f91e31e1bdac1d562349fb6986 Mon Sep 17 00:00:00 2001
From: zhangbo9674 <82555433+zhangbo9674@users.noreply.github.com>
Date: Fri, 25 Mar 2022 22:12:41 +0800
Subject: [PATCH] fix sync_bn error in fp16 amp-o2 (#40943)

---
 python/paddle/fluid/dygraph/amp/auto_cast.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/paddle/fluid/dygraph/amp/auto_cast.py b/python/paddle/fluid/dygraph/amp/auto_cast.py
index bd97a61249f..f7d4be7ee6e 100644
--- a/python/paddle/fluid/dygraph/amp/auto_cast.py
+++ b/python/paddle/fluid/dygraph/amp/auto_cast.py
@@ -171,7 +171,7 @@ def pure_fp16_initialize(models):
             if (layer._dtype == 'float16') or isinstance(
                     layer, (paddle.nn.BatchNorm, paddle.nn.BatchNorm1D,
                             paddle.nn.BatchNorm2D, paddle.nn.BatchNorm3D,
-                            paddle.nn.LayerNorm)):
+                            paddle.nn.LayerNorm, paddle.nn.SyncBatchNorm)):
                 continue
             layer._to_impl(dtype='float16', include_sublayers=False)
     return models
-- 
GitLab
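
The change adds paddle.nn.SyncBatchNorm to the list of normalization layers that pure_fp16_initialize() leaves in float32 when a model is prepared for pure-fp16 (AMP level O2) training. A minimal usage sketch of the effect follows; it assumes a GPU build of PaddlePaddle 2.x where paddle.amp.decorate(level='O2') routes through pure_fp16_initialize(), and is illustrative only, not part of the patch.

    # Sketch: with the fix, SyncBatchNorm parameters stay float32 after O2 decoration,
    # while other layers (e.g. Conv2D) are cast to float16 as before.
    import paddle

    model = paddle.nn.Sequential(
        paddle.nn.Conv2D(3, 8, kernel_size=3),
        paddle.nn.SyncBatchNorm(8),  # previously cast to float16, which triggered the sync_bn error
        paddle.nn.ReLU(),
    )
    model = paddle.amp.decorate(models=model, level='O2')

    print(model[0].weight.dtype)  # expected: paddle.float16 (Conv2D is cast)
    print(model[1].weight.dtype)  # expected: paddle.float32 (SyncBatchNorm is skipped)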