diff --git a/python/paddle/distributed/communication/stream/recv.py b/python/paddle/distributed/communication/stream/recv.py
index d572dd44622b842e82395cfd478a04cc8ac44205..25a8173788473aa79f9f32ddae9945d69156fb80 100644
--- a/python/paddle/distributed/communication/stream/recv.py
+++ b/python/paddle/distributed/communication/stream/recv.py
@@ -12,8 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import paddle.distributed.collective as collective
 import paddle.fluid.framework as framework
+from paddle.distributed import collective


 def _recv_in_dygraph(tensor, src, group, sync_op, use_calc_stream):
diff --git a/python/paddle/distributed/communication/stream/reduce_scatter.py b/python/paddle/distributed/communication/stream/reduce_scatter.py
index a4aeae6312a302b49f6562e6d8bf9b909d7f387f..71fc93478448fcd51e55a72951505cac22a790a3 100644
--- a/python/paddle/distributed/communication/stream/reduce_scatter.py
+++ b/python/paddle/distributed/communication/stream/reduce_scatter.py
@@ -13,7 +13,6 @@
 # limitations under the License.

 import paddle
-import paddle.distributed as dist
 import paddle.fluid.framework as framework
 from paddle.distributed.communication.group import _get_global_group
 from paddle.distributed.communication.reduce import _get_reduce_op, ReduceOp
diff --git a/python/paddle/distributed/communication/stream/scatter.py b/python/paddle/distributed/communication/stream/scatter.py
index 3a3fb00534783897bfc9c75856f1a54dd1969773..ee75583d1614483c679a58b4cb56185100448072 100644
--- a/python/paddle/distributed/communication/stream/scatter.py
+++ b/python/paddle/distributed/communication/stream/scatter.py
@@ -13,7 +13,6 @@
 # limitations under the License.

 import paddle
-import paddle.distributed as dist
 import paddle.fluid.framework as framework
 from paddle.distributed import collective

@@ -44,7 +43,7 @@ def _scatter_tensor_in_dygraph(out_tensor, in_tensor, src, group, sync_op,
         raise RuntimeError("Src rank out of group.")

     nranks = group.nranks
-    rank = dist.get_rank()
+    rank = paddle.distributed.get_rank()
     if rank == src_rank:
         _check_tensor_shape(out_tensor, in_tensor.shape, nranks)

@@ -69,7 +68,7 @@ def _scatter_in_dygraph(tensor, tensor_list, src, group, sync_op,
         raise RuntimeError("Src rank out of group.")

     nranks = group.nranks
-    rank = dist.get_rank()
+    rank = paddle.distributed.get_rank()
     if rank == src_rank:
         if len(tensor_list) == 0:
             raise RuntimeError(
diff --git a/python/paddle/distributed/communication/stream/send.py b/python/paddle/distributed/communication/stream/send.py
index 49eceed55e31f22c666eb79c61b06ae3ce87ac9e..41ec2c0141b1227933a4df5c523455f2e02a8e9d 100644
--- a/python/paddle/distributed/communication/stream/send.py
+++ b/python/paddle/distributed/communication/stream/send.py
@@ -12,8 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import paddle.distributed.collective as collective
 import paddle.fluid.framework as framework
+from paddle.distributed import collective


 def _send_in_dygraph(tensor, dst, group, sync_op, use_calc_stream):