Unverified commit c958ba74, authored by 小飞猪 and committed by GitHub

[xdoctest][task 248-249,266-267,269] reformat example code with google style in `incubate/distributed/fleet/*`,`incubate/nn/layer/*` (#56772)

* [Doctest]fix No.248-249,266-267,269, test=docs_preview

* fix style

* fix

* add env:DISTRIBUTED
Parent e9364a38
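
For reference, the Google-style doctest format this PR converts the examples to prefixes every statement with `>>>` (with `...` for continuation lines inside a block), puts the expected output on the unprefixed line that follows, and gates hardware- or environment-specific examples with a `# doctest: +REQUIRES(...)` directive. A minimal sketch of the pattern (illustrative only, not taken from the changed files):

    >>> # doctest: +REQUIRES(env:GPU)
    >>> import paddle
    >>> paddle.device.set_device('gpu')
    >>> x = paddle.ones([2, 3])   # statements carry the >>> prompt
    >>> print(x.shape)            # expected output goes on the next line, unprefixed
    [2, 3]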
@@ -55,11 +55,13 @@ class HashName(PSDispatcher):
     Examples:
         .. code-block:: python

-            pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
-            vars = ["var1","var2","var3","var4","var5"]
-            rr = RoundRobin(pserver_endpoints)
-            rr.dispatch(vars)
+            >>> from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import HashName
+            >>> pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
+            >>> vars = ["var1","var2","var3","var4","var5"]
+            >>> rr = HashName(pserver_endpoints)
+            >>> rr.dispatch(vars)
     """
@@ -95,11 +97,13 @@ class RoundRobin(PSDispatcher):
     Examples:
         .. code-block:: python

-            pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
-            vars = ["var1","var2","var3","var4","var5"]
-            rr = RoundRobin(pserver_endpoints)
-            rr.dispatch(vars)
+            >>> from paddle.incubate.distributed.fleet.parameter_server.ir.ps_dispatcher import RoundRobin
+            >>> pserver_endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
+            >>> vars = ["var1","var2","var3","var4","var5"]
+            >>> rr = RoundRobin(pserver_endpoints)
+            >>> rr.dispatch(vars)
     """
......
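
As a rough, self-contained illustration of what these two dispatchers compute (a conceptual sketch, not Paddle's implementation): round-robin assignment cycles variables through the endpoint list in order, while hash-based assignment keys each variable name to an endpoint so the same name always lands on the same server.

    # Conceptual sketch only -- not the Paddle implementation.
    endpoints = ["127.0.0.1:6007", "127.0.0.1:6008"]
    var_names = ["var1", "var2", "var3", "var4", "var5"]

    # Round-robin: walk the endpoint list cyclically.
    round_robin = [endpoints[i % len(endpoints)] for i, _ in enumerate(var_names)]
    print(round_robin)  # endpoints alternate: 6007, 6008, 6007, 6008, 6007

    # Hash-based: a given variable name always maps to the same endpoint.
    hashed = [endpoints[hash(name) % len(endpoints)] for name in var_names]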
@@ -46,15 +46,17 @@ class FusedEcMoe(Layer):
     Examples:
         .. code-block:: python

-            # required: gpu
-            import paddle
-            from paddle.incubate.nn.layer.fused_ec_moe import FusedEcMoe
+            >>> # doctest: +REQUIRES(env:GPU)
+            >>> import paddle
+            >>> paddle.device.set_device('gpu')
+            >>> from paddle.incubate.nn.layer.fused_ec_moe import FusedEcMoe

-            x = paddle.randn([10, 128, 1024]) # [bsz, seq_len, d_model]
-            gate = paddle.randn([10, 128, 8]) # [bsz, seq_len, num_experts]
-            moe = FusedEcMoe(1024, 4096, 8, act_type="gelu")
-            y = moe(x, gate)
-            print(y.shape) # [10, 128, 1024]
+            >>> x = paddle.randn([10, 128, 1024]) # [bsz, seq_len, d_model]
+            >>> gate = paddle.randn([10, 128, 8]) # [bsz, seq_len, num_experts]
+            >>> moe = FusedEcMoe(1024, 4096, 8, act_type="gelu")
+            >>> y = moe(x, gate)
+            >>> print(y.shape)
+            [10, 128, 1024]
     """
def __init__(
......
@@ -56,14 +56,16 @@ class FusedLinear(Layer):
     Examples:
         .. code-block:: python

-            # required: gpu
-            import paddle
-            from paddle.incubate.nn import FusedLinear
+            >>> # doctest: +REQUIRES(env:GPU)
+            >>> import paddle
+            >>> paddle.device.set_device('gpu')
+            >>> from paddle.incubate.nn import FusedLinear

-            x = paddle.randn([3, 4])
-            linear = FusedLinear(4, 5)
-            y = linear(x)
-            print(y.shape) # [3, 5]
+            >>> x = paddle.randn([3, 4])
+            >>> linear = FusedLinear(4, 5)
+            >>> y = linear(x)
+            >>> print(y.shape)
+            [3, 5]
     """
def __init__(
......
@@ -55,21 +55,25 @@ class ListenAndServ:
     Examples:
         .. code-block:: python

-            import paddle.fluid as fluid
-            import paddle
-            with fluid.program_guard(main):
-                serv = layers.ListenAndServ(
-                    "127.0.0.1:6170", ["X"], optimizer_mode=False)
-                with serv.do():
-                    x = paddle.static.data(
-                        shape=[32, 32],
-                        dtype='float32',
-                        name="X")
-                    paddle.nn.initializer.Constant(value=1.0)(x, main.global_block())
-                    paddle.scale(x=x, scale=10.0, out=out_var)
-            exe = fluid.Executor(place)
-            exe.run(main)
+            >>> # doctest: +REQUIRES(env:DISTRIBUTED)
+            >>> from paddle.incubate.nn.layer.io import ListenAndServ
+            >>> import paddle
+            >>> paddle.enable_static()
+            >>> place = paddle.CPUPlace()
+            >>> main = paddle.static.Program()
+            >>> with paddle.static.program_guard(main):
+            ...     serv = ListenAndServ(
+            ...         "127.0.0.1:6170", ["X"], optimizer_mode=False)
+            ...     with serv.do():
+            ...         x = paddle.static.data(
+            ...             shape=[32, 32],
+            ...             dtype='float32',
+            ...             name="X")
+            ...         paddle.nn.initializer.Constant(value=1.0)(x, main.global_block())
+            ...         paddle.scale(x=x, scale=10.0)
+            >>> exe = paddle.static.Executor(place)
+            >>> exe.run(main)
     """
def __init__(self, endpoint, inputs, fan_in=1, optimizer_mode=True):
@@ -115,7 +119,9 @@ class ListenAndServ:
         return parent_block

     def complete_op(self):
-        from paddle.incubate.fleet.parameter_server.mode import DistributedMode
+        from paddle.incubate.distributed.fleet.parameter_server.mode import (
+            DistributedMode,
+        )
         main_program = self.helper.main_program
         current_block = main_program.current_block()
......