Unverified · Commit f9c51e8c authored by iLeGend, committed by GitHub


[xdoctest][task 213,215-217] reformat example code with google style in `python/paddle/distributed/fleet/base` (#56651)

* [xdoctest][task 213,215-217] reformat example code with google style in python/paddle/distributed/fleet/base

* fix output as comments
Parent c7727885
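The pattern applied across all four files is the same: plain `code-block` samples become Google-style doctests, with `>>> ` and `... ` prompts, checked output on its own line, and an xdoctest `REQUIRES` directive gating samples that need a distributed environment. A minimal before/after sketch of the convention (not taken verbatim from any one hunk below):

    Before:

        Examples:
            .. code-block:: python

                # required: distributed
                import paddle.distributed as dist
                dist.init_parallel_env()

    After:

        Examples:
            .. code-block:: python

                >>> # doctest: +REQUIRES(env: DISTRIBUTED)
                >>> import paddle.distributed as dist
                >>> dist.init_parallel_env()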
@@ -36,14 +36,14 @@ class OrthogonalStrategy:
     Examples:
         .. code-block:: python

-            # required: distributed
-            import paddle
-            import paddle.distributed as dist
-            from paddle.distributed.fleet.base.strategy_group import DPGroup, MPGroup, PPGroup
-            from paddle.distributed.fleet.base.orthogonal_strategy import OrthogonalStrategy
-            dist.init_parallel_env()
-            strategy = OrthogonalStrategy([("dp", 2, DPGroup), ("mp", 2, MPGroup), ("pp", 2, PPGroup)], fused_strategy_dict={"check": ["mp", "pp"]})
+            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
+            >>> import paddle
+            >>> import paddle.distributed as dist
+            >>> from paddle.distributed.fleet.base.strategy_group import DPGroup, MPGroup, PPGroup
+            >>> from paddle.distributed.fleet.base.orthogonal_strategy import OrthogonalStrategy
+            >>> dist.init_parallel_env()
+            >>> strategy = OrthogonalStrategy([("dp", 2, DPGroup), ("mp", 2, MPGroup), ("pp", 2, PPGroup)], fused_strategy_dict={"check": ["mp", "pp"]})
     """
@@ -29,12 +29,15 @@ class StrategyGroupBase:
     Examples:
         .. code-block:: python

-            import paddle.distributed as dist
-            from paddle.distributed.fleet.base.strategy_group import StrategyGroupBase
-
-            dist.init_parallel_env()
-            strategy_group = dist.fleet.base.strategy_group.StrategyGroupBase([[0, 1], [2, 3]])
-            print(strategy_group.world_size) # 2
+            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
+            >>> import paddle.distributed as dist
+            >>> from paddle.distributed.fleet.base.strategy_group import StrategyGroupBase
+            >>> dist.init_parallel_env()
+            >>> strategy_group = dist.fleet.base.strategy_group.StrategyGroupBase([[0, 1], [2, 3]])
+            >>> print(strategy_group.world_size)
+            2
     """
@@ -43,9 +43,11 @@ class ParallelMode:
     Examples:
         .. code-block:: python

-            import paddle
-            parallel_mode = paddle.distributed.ParallelMode
-            print(parallel_mode.DATA_PARALLEL) # 0
+            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
+            >>> import paddle
+            >>> parallel_mode = paddle.distributed.ParallelMode
+            >>> print(parallel_mode.DATA_PARALLEL)
+            0
     """
@@ -78,37 +78,36 @@ class UtilBase:
     Examples:
         .. code-block:: python

-            # Save the following code in `train.py`, and then execute the command `fleetrun --server_num 2 --worker_num 2 train.py`.
-            import paddle.distributed.fleet as fleet
-            from paddle.distributed.fleet import PaddleCloudRoleMaker
-            import sys
-            import numpy as np
-            import os
-
-            os.environ["PADDLE_WITH_GLOO"] = "2"
-
-            def train():
-                role = PaddleCloudRoleMaker(
-                    is_collective=False,
-                    init_gloo=True,
-                    path="./tmp_gloo")
-                fleet.init(role)
-
-                if fleet.is_server():
-                    input = [1, 2]
-                    output = fleet.util.all_reduce(input, "sum", "server")
-                    print(output)
-                    # [2, 4]
-                elif fleet.is_worker():
-                    input = np.array([3, 4])
-                    output = fleet.util.all_reduce(input, "sum", "worker")
-                    print(output)
-                    # [6, 8]
-                    output = fleet.util.all_reduce(input, "sum", "all")
-                    print(output)
-                    # [8, 12]
-            if __name__ == "__main__":
-                train()
+            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
+            >>> # Save the following code in `train.py`, and then execute the command `fleetrun --server_num 2 --worker_num 2 train.py`.
+            >>> import paddle.distributed.fleet as fleet
+            >>> from paddle.distributed.fleet import PaddleCloudRoleMaker
+            >>> import sys
+            >>> import numpy as np
+            >>> import os
+
+            >>> os.environ["PADDLE_WITH_GLOO"] = "2"
+
+            >>> def train():
+            ...     role = PaddleCloudRoleMaker(
+            ...         is_collective=False,
+            ...         init_gloo=True,
+            ...         path="./tmp_gloo")
+            ...     fleet.init(role)
+            ...
+            ...     if fleet.is_server():
+            ...         input = np.array([1, 2])
+            ...         output = fleet.util.all_reduce(input, "sum", "server")
+            ...         print(output) # [2, 4]
+            ...     elif fleet.is_worker():
+            ...         input = np.array([3, 4])
+            ...         output = fleet.util.all_reduce(input, "sum", "worker")
+            ...         print(output) # [6, 8]
+            ...         output = fleet.util.all_reduce(input, "sum", "all")
+            ...         print(output) # [8, 12]
+            >>> if __name__ == "__main__":
+            ...     train()
     """
     return self.role_maker._all_reduce(input, mode, comm_world)
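A note on the "fix output as comments" item from the commit message: the sample above only produces output when launched with `fleetrun`, so the expected values are kept as inline `# [...]` comments rather than as doctest output lines, which the sample checker would otherwise try to verify. Samples that run in a single process put the output on its own line instead, where it is matched against stdout. A minimal sketch of the two conventions, both excerpted from the hunks in this diff and assuming xdoctest-style output matching:

    >>> # verified by the checker: output appears on its own line
    >>> print(strategy_group.world_size)
    2

    >>> # not verified: this line sits inside train(), which only runs
    >>> # under fleetrun, so the expected value stays in an inline comment
    ...         print(output) # [2, 4]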
@@ -123,33 +122,33 @@ class UtilBase:
     Examples:
         .. code-block:: python

-            # Save the following code in `train.py`, and then execute the command `fleetrun --server_num 2 --worker_num 2 train.py`.
-
-            import paddle.distributed.fleet as fleet
-            from paddle.distributed.fleet import PaddleCloudRoleMaker
-            import sys
-            import os
-            os.environ["PADDLE_WITH_GLOO"] = "2"
-            def train():
-                role = PaddleCloudRoleMaker(
-                    is_collective=False,
-                    init_gloo=True,
-                    path="./tmp_gloo")
-                fleet.init(role)
-
-                if fleet.is_server():
-                    fleet.util.barrier("server")
-                    print("all server arrive here")
-                elif fleet.is_worker():
-                    fleet.util.barrier("worker")
-                    print("all server arrive here")
-                fleet.util.barrier("all")
-                print("all servers and workers arrive here")
-            if __name__ == "__main__":
-                train()
+            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
+            >>> # Save the following code in `train.py`, and then execute the command `fleetrun --server_num 2 --worker_num 2 train.py`.
+            >>> import paddle.distributed.fleet as fleet
+            >>> from paddle.distributed.fleet import PaddleCloudRoleMaker
+            >>> import sys
+            >>> import os
+            >>> os.environ["PADDLE_WITH_GLOO"] = "2"
+            >>> def train():
+            ...     role = PaddleCloudRoleMaker(
+            ...         is_collective=False,
+            ...         init_gloo=True,
+            ...         path="./tmp_gloo")
+            ...     fleet.init(role)
+            ...
+            ...     if fleet.is_server():
+            ...         fleet.util.barrier("server")
+            ...         print("all server arrive here") # all server arrive here
+            ...     elif fleet.is_worker():
+            ...         fleet.util.barrier("worker")
+            ...         print("all server arrive here") # all server arrive here
+            ...     fleet.util.barrier("all")
+            ...     print("all servers and workers arrive here") # all servers and workers arrive here
+            >>> if __name__ == "__main__":
+            ...     train()
     """
     self.role_maker._barrier(comm_world)
@@ -168,37 +167,35 @@ class UtilBase:
     Examples:
         .. code-block:: python

-            # Save the following code in `train.py`, and then execute the command `fleetrun --server_num 2 --worker_num 2 train.py`.
-            import paddle.distributed.fleet as fleet
-            from paddle.distributed.fleet import PaddleCloudRoleMaker
-            import sys
-            import os
-
-            os.environ["PADDLE_WITH_GLOO"] = "2"
-
-            def train():
-                role = PaddleCloudRoleMaker(
-                    is_collective=False,
-                    init_gloo=True,
-                    path="./tmp_gloo")
-                fleet.init(role)
-
-                if fleet.is_server():
-                    input = fleet.server_index()
-                    output = fleet.util.all_gather(input, "server")
-                    print(output)
-                    # output = [0, 1]
-                elif fleet.is_worker():
-                    input = fleet.worker_index()
-                    output = fleet.util.all_gather(input, "worker")
-                    # output = [0, 1]
-                    print(output)
-                    output = fleet.util.all_gather(input, "all")
-                    print(output)
-                    # output = [0, 1, 0, 1]
-
-            if __name__ == "__main__":
-                train()
+            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
+            >>> # Save the following code in `train.py`, and then execute the command `fleetrun --server_num 2 --worker_num 2 train.py`.
+            >>> import paddle.distributed.fleet as fleet
+            >>> from paddle.distributed.fleet import PaddleCloudRoleMaker
+            >>> import sys
+            >>> import os
+            >>> os.environ["PADDLE_WITH_GLOO"] = "2"
+            >>> def train():
+            ...     role = PaddleCloudRoleMaker(
+            ...         is_collective=False,
+            ...         init_gloo=True,
+            ...         path="./tmp_gloo")
+            ...     fleet.init(role)
+            ...
+            ...     if fleet.is_server():
+            ...         input = fleet.server_index()
+            ...         output = fleet.util.all_gather(input, "server")
+            ...         print(output) # [0, 1]
+            ...     elif fleet.is_worker():
+            ...         input = fleet.worker_index()
+            ...         output = fleet.util.all_gather(input, "worker")
+            ...         print(output) # [0, 1]
+            ...         output = fleet.util.all_gather(input, "all")
+            ...         print(output) # [0, 1, 0, 1]
+            >>> if __name__ == "__main__":
+            ...     train()
     """
     return self.role_maker._all_gather(input, comm_world)
@@ -250,21 +247,22 @@ class UtilBase:
     Examples:
         .. code-block:: python

-            import paddle.distributed.fleet as fleet
-            from paddle.distributed.fleet import UserDefinedRoleMaker
-
-            role = UserDefinedRoleMaker(
-                is_collective=False,
-                init_gloo=False,
-                current_id=0,
-                role=fleet.Role.WORKER,
-                worker_endpoints=["127.0.0.1:6003", "127.0.0.1:6004"],
-                server_endpoints=["127.0.0.1:6001", "127.0.0.1:6002"])
-            fleet.init(role)
-
-            files = fleet.util.get_file_shard(["file1", "file2", "file3"])
-            print(files)
-            # files = ["file1", "file2"]
+            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
+            >>> import paddle.distributed.fleet as fleet
+            >>> from paddle.distributed.fleet import UserDefinedRoleMaker
+
+            >>> role = UserDefinedRoleMaker(
+            ...     is_collective=False,
+            ...     init_gloo=False,
+            ...     current_id=0,
+            ...     role=fleet.Role.WORKER,
+            ...     worker_endpoints=["127.0.0.1:6003", "127.0.0.1:6004"],
+            ...     server_endpoints=["127.0.0.1:6001", "127.0.0.1:6002"])
+            >>> fleet.init(role)
+
+            >>> files = fleet.util.get_file_shard(["file1", "file2", "file3"])
+            >>> print(files)
+            ["file1", "file2"]
     """
     if not isinstance(files, list):
         raise TypeError("files should be a list of file need to be read.")
@@ -299,19 +297,21 @@ class UtilBase:
     Examples:
         .. code-block:: python

-            import paddle.distributed.fleet as fleet
-            from paddle.distributed.fleet import UserDefinedRoleMaker
-
-            role = UserDefinedRoleMaker(
-                is_collective=False,
-                init_gloo=False,
-                current_id=0,
-                role=fleet.Role.WORKER,
-                worker_endpoints=["127.0.0.1:6003", "127.0.0.1:6004"],
-                server_endpoints=["127.0.0.1:6001", "127.0.0.1:6002"])
-            fleet.init(role)
-
-            fleet.util.print_on_rank("I'm worker 0", 0)
+            >>> # doctest: +REQUIRES(env: DISTRIBUTED)
+            >>> import paddle.distributed.fleet as fleet
+            >>> from paddle.distributed.fleet import UserDefinedRoleMaker
+
+            >>> role = UserDefinedRoleMaker(
+            ...     is_collective=False,
+            ...     init_gloo=False,
+            ...     current_id=0,
+            ...     role=fleet.Role.WORKER,
+            ...     worker_endpoints=["127.0.0.1:6003", "127.0.0.1:6004"],
+            ...     server_endpoints=["127.0.0.1:6001", "127.0.0.1:6002"])
+            >>> fleet.init(role)
+
+            >>> fleet.util.print_on_rank("I'm worker 0", 0)
+            I'm worker 0
     """
     if self.role_maker._worker_index() != rank_id:
         return
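To check the converted samples locally, something along the following lines should work. This is a sketch, not the command Paddle's CI runs: it assumes the `xdoctest` package is installed, and it assumes the `REQUIRES(env: DISTRIBUTED)` directive is satisfied by setting a `DISTRIBUTED` environment variable before collection.

    import os
    import xdoctest

    # Assumption: setting this variable enables samples gated by
    # `# doctest: +REQUIRES(env: DISTRIBUTED)`.
    os.environ["DISTRIBUTED"] = "1"

    # Run every doctest found in one of the modules touched by this commit.
    xdoctest.doctest_module(
        "python/paddle/distributed/fleet/base/strategy_group.py", command="all"
    )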