From 60f9c60cd8196c66c391d79c35d341e9072f8838 Mon Sep 17 00:00:00 2001
From: wuhuachaocoding <77733235+wuhuachaocoding@users.noreply.github.com>
Date: Tue, 20 Sep 2022 16:46:15 +0800
Subject: [PATCH] update for py3.6. (#46269)

* update for py3.6.

* update for py_3.6

---
 python/paddle/incubate/optimizer/distributed_fused_lamb.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/python/paddle/incubate/optimizer/distributed_fused_lamb.py b/python/paddle/incubate/optimizer/distributed_fused_lamb.py
index f8e3b55aba6..60661700e13 100644
--- a/python/paddle/incubate/optimizer/distributed_fused_lamb.py
+++ b/python/paddle/incubate/optimizer/distributed_fused_lamb.py
@@ -19,7 +19,6 @@ from paddle.fluid.clip import ClipGradByGlobalNorm
 from paddle.fluid.initializer import Constant
 from paddle.fluid.layer_helper import LayerHelper
 from paddle.fluid.optimizer import Optimizer
-import paddle.distributed as dist
 from paddle.distributed.collective import new_group
 from paddle.fluid.executor import global_scope
 from paddle.fluid.framework import name_scope
@@ -288,8 +287,9 @@ class DistributedFusedLamb(Optimizer):

         step = self._get_or_create_step()

-        rank = dist.get_rank()
-        nranks = dist.get_world_size()
+        from paddle.distributed import get_rank, get_world_size
+        rank = get_rank()
+        nranks = get_world_size()
         if self._nproc_per_node is None:
             nproc_per_node = nranks
         else:
--
GitLab
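
Note on the change: the patch removes the module-level "import paddle.distributed as dist" and instead imports get_rank and get_world_size locally, inside the method of DistributedFusedLamb that needs them, so paddle.distributed is only loaded when that code path actually executes (the commit message cites Python 3.6 compatibility as the motivation). Below is a minimal sketch of the deferred-import pattern, not the actual Paddle source: the wrapper function name is hypothetical, and running it assumes paddle is installed and the distributed environment is initialized.

    # Sketch of the function-local (deferred) import pattern used by this patch.
    # Assumption: paddle is installed; get_rank/get_world_size come from
    # paddle.distributed, as shown in the diff above.
    def query_rank_and_world_size():
        # Importing here delays loading paddle.distributed until this function
        # is called, rather than at module import time.
        from paddle.distributed import get_rank, get_world_size
        rank = get_rank()
        nranks = get_world_size()
        return rank, nranks

The trade-off is the usual one for local imports: the module can be imported in environments where the deferred dependency would fail or be slow to load, at the cost of a (cached, effectively one-time) import lookup when the function first runs.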