From df00636bad51595793dcc3b59073dca72480cb37 Mon Sep 17 00:00:00 2001
From: lilong12
Date: Thu, 6 May 2021 11:35:55 +0800
Subject: [PATCH] update, test=develop (#32731)

---
 paddle/fluid/pybind/op_function_generator.cc |  1 -
 python/paddle/distributed/collective.py      | 46 ++++++++++----------
 2 files changed, 24 insertions(+), 23 deletions(-)

diff --git a/paddle/fluid/pybind/op_function_generator.cc b/paddle/fluid/pybind/op_function_generator.cc
index a340d7a0f00..bf3c7784321 100644
--- a/paddle/fluid/pybind/op_function_generator.cc
+++ b/paddle/fluid/pybind/op_function_generator.cc
@@ -44,7 +44,6 @@ std::map<std::string, std::vector<std::string>> op_ins_map = {
     {"gru_unit", {"Input", "HiddenPrev", "Weight", "Bias"}},
     {"label_smooth", {"X", "PriorDist"}},
     {"assign", {"X"}},
-    {"send_v2", {"X"}},
     {"reshape2", {"X", "Shape"}},
     {"expand", {"X", "ExpandTimes"}},
     {"slice", {"Input", "StartsTensor", "EndsTensor"}},
diff --git a/python/paddle/distributed/collective.py b/python/paddle/distributed/collective.py
index 69a8f8956a8..fefabaf6976 100644
--- a/python/paddle/distributed/collective.py
+++ b/python/paddle/distributed/collective.py
@@ -1186,23 +1186,24 @@ def send(tensor, dst=0, group=None, use_calc_stream=True):
         tensor (Tensor): The Tensor to send. Its data type should be float16, float32, float64, int32 or int64.
         dst (int): The destination rank id.
-        group (Group): The group instance return by new_group or None for global default group.
-        use_calc_stream (bool): Whether to use calculate stream or communication stream.
+        group (Group, optional): The group instance returned by new_group, or None for the global default group. Default: None.
+        use_calc_stream (bool, optional): Whether to use the calculate stream or the communication stream. Default: True.
     Returns:
         None.
 
     Examples:
         .. code-block:: python
 
+            # required: distributed
             import paddle
-            #from paddle.distributed import init_parallel_env
-            #init_parallel_env()
-            #if paddle.distributed.ParallelEnv().rank == 0:
-            #    data = paddle.to_tensor([7, 8, 9])
-            #    paddle.distributed.send(data, dst=1)
-            #else:
-            #    data = paddle.to_tensor([1,2,3])
-            #    paddle.distributed.recv(data, src=0)
-            #out = data.numpy()
+            from paddle.distributed import init_parallel_env
+            init_parallel_env()
+            if paddle.distributed.ParallelEnv().rank == 0:
+                data = paddle.to_tensor([7, 8, 9])
+                paddle.distributed.send(data, dst=1)
+            else:
+                data = paddle.to_tensor([1, 2, 3])
+                paddle.distributed.recv(data, src=0)
+            out = data.numpy()
     """
     if group is not None and not group.is_member():
         return
@@ -1235,23 +1236,24 @@ def recv(tensor, src=0, group=None, use_calc_stream=True):
         tensor (Tensor): The Tensor to receive. Its data type should be float16, float32, float64, int32 or int64.
         src (int): The source rank id.
-        group (Group): The group instance return by new_group or None for global default group.
-        use_calc_stream (bool): Whether to use calculate stream or communication stream.
+        group (Group, optional): The group instance returned by new_group, or None for the global default group. Default: None.
+        use_calc_stream (bool, optional): Whether to use the calculate stream or the communication stream. Default: True.
     Returns:
         None.
 
     Examples:
         .. code-block:: python
 
+            # required: distributed
             import paddle
-            #from paddle.distributed import init_parallel_env
-            #init_parallel_env()
-            #if paddle.distributed.ParallelEnv().rank == 0:
-            #    data = paddle.to_tensor([7, 8, 9])
-            #    paddle.distributed.send(data, dst=1)
-            #else:
-            #    data = paddle.to_tensor([1,2,3])
-            #    paddle.distributed.recv(data, src=0)
-            #out = data.numpy()
+            from paddle.distributed import init_parallel_env
+            init_parallel_env()
+            if paddle.distributed.ParallelEnv().rank == 0:
+                data = paddle.to_tensor([7, 8, 9])
+                paddle.distributed.send(data, dst=1)
+            else:
+                data = paddle.to_tensor([1, 2, 3])
+                paddle.distributed.recv(data, src=0)
+            out = data.numpy()
     """
     if group is not None and not group.is_member():
        return
--
GitLab
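Note on running the docstring example: it executes one copy of the script per rank, so it has to be started through a distributed launcher rather than plain `python`. Below is a minimal standalone sketch of the same send/recv round trip; the script name `send_recv_demo.py` and the launch command are illustrative assumptions, not part of the patch.

    # send_recv_demo.py -- hypothetical script name for this sketch
    import paddle
    from paddle.distributed import init_parallel_env

    init_parallel_env()  # join the global default process group

    if paddle.distributed.ParallelEnv().rank == 0:
        data = paddle.to_tensor([7, 8, 9])
        paddle.distributed.send(data, dst=1)  # send the tensor to rank 1
    else:
        # Receive buffer: must match the shape and dtype of the sent tensor.
        data = paddle.to_tensor([1, 2, 3])
        paddle.distributed.recv(data, src=0)  # recv overwrites data in place

    print(data.numpy())  # both ranks end up holding [7 8 9]

Assuming two visible devices, this could be launched with something like `python -m paddle.distributed.launch --gpus 0,1 send_recv_demo.py` (the `--gpus` flag reflects Paddle 2.1-era tooling). Rank 0 prints the tensor it sent; rank 1 prints the tensor it received into its placeholder buffer.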