From f7d1a94042adbd7613f66c48f726356e24a66043 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E6=98=A5=E4=B9=94?= <83450930+Liyulingyue@users.noreply.github.com> Date: Tue, 22 Aug 2023 19:16:16 +0800 Subject: [PATCH] [xdoctest] reformat example code with google style in No. 246 and 247 (#56475) --- python/paddle/incubate/autotune.py | 46 +++++++++---------- .../paddle/incubate/distributed/fleet/base.py | 13 +++--- 2 files changed, 30 insertions(+), 29 deletions(-) diff --git a/python/paddle/incubate/autotune.py b/python/paddle/incubate/autotune.py index dfad1dc58c9..a4b9cbe7006 100644 --- a/python/paddle/incubate/autotune.py +++ b/python/paddle/incubate/autotune.py @@ -56,29 +56,29 @@ def set_config(config=None): Examples: .. code-block:: python - import paddle - import json - - # config is a dict. - config = { - "kernel": { - "enable": True, - "tuning_range": [1, 5], - }, - "layout": { - "enable": True, - }, - "dataloader": { - "enable": True, - } - } - paddle.incubate.autotune.set_config(config) - - # config is the path of json file. - config_json = json.dumps(config) - with open('config.json', 'w') as json_file: - json_file.write(config_json) - paddle.incubate.autotune.set_config('config.json') + >>> import paddle + >>> import json + + >>> # config is a dict. + >>> config = { + ... "kernel": { + ... "enable": True, + ... "tuning_range": [1, 5], + ... }, + ... "layout": { + ... "enable": True, + ... }, + ... "dataloader": { + ... "enable": True, + ... } + ... } + >>> paddle.incubate.autotune.set_config(config) + + >>> # config is the path of json file. + >>> config_json = json.dumps(config) + >>> with open('config.json', 'w') as json_file: + ...     json_file.write(config_json) + >>> paddle.incubate.autotune.set_config('config.json') """ if config is None: diff --git a/python/paddle/incubate/distributed/fleet/base.py b/python/paddle/incubate/distributed/fleet/base.py index a9eda099f72..81d071bf982 100644 --- a/python/paddle/incubate/distributed/fleet/base.py +++ b/python/paddle/incubate/distributed/fleet/base.py @@ -343,12 +343,13 @@ class DistributedOptimizer(metaclass=abc.ABCMeta): Examples: .. code-block:: python - loss = network() - optimizer = fluid.optimizer.SGD(learning_rate=0.1) - params_grads = optimizer.backward(loss) - # you may append operations for params_grads here - # ... - optimizer.apply_gradients(params_grads) + >>> # doctest: +SKIP('The network is not defined.') + >>> loss = network() + >>> optimizer = fluid.optimizer.SGD(learning_rate=0.1) + >>> params_grads = optimizer.backward(loss) + >>> # you may append operations for params_grads here + >>> # ... + >>> optimizer.apply_gradients(params_grads) """ pass -- GitLab