From b0b827c7947a0fbefda94e30393d0cfe3968a5c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=BC=A0=E6=98=A5=E4=B9=94?= <83450930+Liyulingyue@users.noreply.github.com> Date: Tue, 29 Aug 2023 19:34:57 +0800 Subject: [PATCH] [xdoctest] reformat example code with google style in No. 299 (#56597) * Update dlpack.py * Apply suggestions from code review * Apply suggestions from code review * xdoc * Apply suggestions from code review * Apply suggestions from code review --- python/paddle/incubate/optimizer/lbfgs.py | 76 +++++++++++------------ 1 file changed, 37 insertions(+), 39 deletions(-) diff --git a/python/paddle/incubate/optimizer/lbfgs.py b/python/paddle/incubate/optimizer/lbfgs.py index ae7511ae03e..04d90d1f8c2 100644 --- a/python/paddle/incubate/optimizer/lbfgs.py +++ b/python/paddle/incubate/optimizer/lbfgs.py @@ -76,45 +76,43 @@ class LBFGS(Optimizer): Examples: .. code-block:: python - import paddle - import numpy as np - from paddle.incubate.optimizer import LBFGS - - paddle.disable_static() - np.random.seed(0) - np_w = np.random.rand(1).astype(np.float32) - np_x = np.random.rand(1).astype(np.float32) - - inputs = [np.random.rand(1).astype(np.float32) for i in range(10)] - # y = 2x - targets = [2 * x for x in inputs] - - class Net(paddle.nn.Layer): - def __init__(self): - super().__init__() - w = paddle.to_tensor(np_w) - self.w = paddle.create_parameter(shape=w.shape, dtype=w.dtype, default_initializer=paddle.nn.initializer.Assign(w)) - - def forward(self, x): - return self.w * x - - net = Net() - opt = LBFGS(learning_rate=1, max_iter=1, max_eval=None, tolerance_grad=1e-07, tolerance_change=1e-09, history_size=100, line_search_fn='strong_wolfe', parameters=net.parameters()) - def train_step(inputs, targets): - def closure(): - outputs = net(inputs) - loss = paddle.nn.functional.mse_loss(outputs, targets) - print('loss: ', loss.item()) - opt.clear_grad() - loss.backward() - return loss - opt.step(closure) - - - for input, target in zip(inputs, targets): - input = paddle.to_tensor(input) - target = paddle.to_tensor(target) - train_step(input, target) + >>> import paddle + >>> import numpy as np + >>> from paddle.incubate.optimizer import LBFGS + + >>> paddle.disable_static() + >>> np.random.seed(0) + >>> np_w = np.random.rand(1).astype(np.float32) + >>> np_x = np.random.rand(1).astype(np.float32) + + >>> inputs = [np.random.rand(1).astype(np.float32) for i in range(10)] + >>> # y = 2x + >>> targets = [2 * x for x in inputs] + + >>> class Net(paddle.nn.Layer): + ... def __init__(self): + ... super().__init__() + ... w = paddle.to_tensor(np_w) + ... self.w = paddle.create_parameter(shape=w.shape, dtype=w.dtype, default_initializer=paddle.nn.initializer.Assign(w)) + ... def forward(self, x): + ... return self.w * x + + >>> net = Net() + >>> opt = LBFGS(learning_rate=1, max_iter=1, max_eval=None, tolerance_grad=1e-07, tolerance_change=1e-09, history_size=100, line_search_fn='strong_wolfe', parameters=net.parameters()) + >>> def train_step(inputs, targets): + ... def closure(): + ... outputs = net(inputs) + ... loss = paddle.nn.functional.mse_loss(outputs, targets) + ... print('loss: ', loss.item()) + ... opt.clear_grad() + ... loss.backward() + ... return loss + ... opt.step(closure) + + >>> for input, target in zip(inputs, targets): + ... input = paddle.to_tensor(input) + ... target = paddle.to_tensor(target) + ... train_step(input, target) """ -- GitLab
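The converted example can be exercised locally before review; the snippet below is a minimal sketch, not part of the patch, and assumes xdoctest and paddle are installed and that it is run from the repository root (Paddle's CI may use its own sample-code checker, so the exact invocation can differ). It follows the same '>>>' style the patch introduces:

    >>> # Assumed verification step: collect and run every '>>>' example in the touched file.
    >>> # The file path comes from the diff above; command='all' executes each example found.
    >>> import xdoctest
    >>> xdoctest.doctest_module('python/paddle/incubate/optimizer/lbfgs.py', command='all')

Because xdoctest only compares output where an expected-output ("want") block is given, the loss values printed inside the training loop do not need matching output lines for the example to pass.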