未验证 提交 f7d1a940 编写于 作者: 张春乔 提交者: GitHub

[xdoctest] reformat example code with google style in No. 246 and 247 (#56475)

上级 17d6da6b
...@@ -56,29 +56,29 @@ def set_config(config=None): ...@@ -56,29 +56,29 @@ def set_config(config=None):
Examples: Examples:
.. code-block:: python .. code-block:: python
import paddle >>> import paddle
import json >>> import json
# config is a dict. >>> # config is a dict.
config = { >>> config = {
"kernel": { ... "kernel": {
"enable": True, ... "enable": True,
"tuning_range": [1, 5], ... "tuning_range": [1, 5],
}, ... },
"layout": { ... "layout": {
"enable": True, ... "enable": True,
}, ... },
"dataloader": { ... "dataloader": {
"enable": True, ... "enable": True,
} ... }
            } ... }
paddle.incubate.autotune.set_config(config) >>> paddle.incubate.autotune.set_config(config)
# config is the path of json file. >>> # config is the path of json file.
config_json = json.dumps(config) >>> config_json = json.dumps(config)
with open('config.json', 'w') as json_file: >>> with open('config.json', 'w') as json_file:
json_file.write(config_json) ... json_file.write(config_json)
paddle.incubate.autotune.set_config('config.json') >>> paddle.incubate.autotune.set_config('config.json')
""" """
if config is None: if config is None:
......
...@@ -343,12 +343,13 @@ class DistributedOptimizer(metaclass=abc.ABCMeta): ...@@ -343,12 +343,13 @@ class DistributedOptimizer(metaclass=abc.ABCMeta):
Examples: Examples:
.. code-block:: python .. code-block:: python
loss = network() >>> # doctest: +SKIP('The network is not defined.')
optimizer = fluid.optimizer.SGD(learning_rate=0.1) >>> loss = network()
params_grads = optimizer.backward(loss) >>> optimizer = fluid.optimizer.SGD(learning_rate=0.1)
# you may append operations for params_grads here >>> params_grads = optimizer.backward(loss)
# ... >>> # you may append operations for params_grads here
optimizer.apply_gradients(params_grads) >>> # ...
>>> optimizer.apply_gradients(params_grads)
""" """
pass pass
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册