From 1cb4c154d1f5a6d550bcbf861e62cb37f0c87ca0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=9D=8E=E5=AD=A3?= <2042519524@qq.com>
Date: Mon, 26 Jul 2021 11:23:21 +0800
Subject: [PATCH] Fix the bug in paddle.distributed.split demo. The
 paddle.distributed.split op just can be used in static mode. (#34306)

* fix the bug in paddle.distributed.split demo
---
 python/paddle/distributed/collective.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/python/paddle/distributed/collective.py b/python/paddle/distributed/collective.py
index cfba9f656b3..df6df856222 100644
--- a/python/paddle/distributed/collective.py
+++ b/python/paddle/distributed/collective.py
@@ -1341,20 +1341,20 @@ def split(x,
     Examples:
         .. code-block:: python
-
+            # required: distributed
             import paddle
-            from paddle.distributed import init_parallel_env
-
-            # required: gpu
+            import paddle.distributed.fleet as fleet
+            paddle.enable_static()
             paddle.set_device('gpu:%d'%paddle.distributed.ParallelEnv().dev_id)
-            init_parallel_env()
+            fleet.init(is_collective=True)
             data = paddle.randint(0, 8, shape=[10,4])
             emb_out = paddle.distributed.split(
                 data,
                 (8, 8),
                 operation="embedding",
                 num_partitions=2)
+
     """
     assert isinstance(size, (list, tuple)), (
         "The type of size for "
--
GitLab
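
For reference, below is the corrected docstring example reproduced as a standalone script. It is only a sketch of how the fixed demo is intended to run: paddle.distributed.split works in static graph mode under a collective fleet setup, and the script is assumed to be started by a distributed launcher on multiple GPUs (for example `python -m paddle.distributed.launch`); that launch environment is an assumption about usage, not part of the diff itself.

    # required: distributed
    # Assumed to be launched with a collective launcher on 2 GPUs,
    # e.g. `python -m paddle.distributed.launch --gpus "0,1" demo.py`.
    import paddle
    import paddle.distributed.fleet as fleet

    # split() is a static-graph-only op, so switch to static mode first
    # (this is the point of the fix in this patch).
    paddle.enable_static()
    # Bind each process to its own GPU.
    paddle.set_device('gpu:%d' % paddle.distributed.ParallelEnv().dev_id)
    # Initialize fleet in collective mode instead of init_parallel_env().
    fleet.init(is_collective=True)

    data = paddle.randint(0, 8, shape=[10, 4])
    # Split an 8 x 8 embedding table into 2 partitions, one per rank.
    emb_out = paddle.distributed.split(
        data,
        (8, 8),
        operation="embedding",
        num_partitions=2)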