From c0ee14f6767cda4f38566c2f23eb364e3354c935 Mon Sep 17 00:00:00 2001 From: hua-zi <83271073+hua-zi@users.noreply.github.com> Date: Thu, 27 Apr 2023 11:12:24 +0800 Subject: [PATCH] update adamw.py (#52984) * update adamw.py out.backward() -> loss.backward() * Update adamw.py --- python/paddle/optimizer/adamw.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/paddle/optimizer/adamw.py b/python/paddle/optimizer/adamw.py index a525ac194a1..f8e00eabecf 100644 --- a/python/paddle/optimizer/adamw.py +++ b/python/paddle/optimizer/adamw.py @@ -90,7 +90,7 @@ class AdamW(Optimizer): name (str, optional): Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`. The default value is None. - **Notes**: + Notes: **Currently, AdamW doesn't support sparse parameter optimization.** Examples: @@ -111,7 +111,7 @@ class AdamW(Optimizer): beta1=beta1, beta2=beta2, weight_decay=0.01) - out.backward() + loss.backward() opt.step() opt.clear_grad() @@ -135,7 +135,7 @@ class AdamW(Optimizer): }], weight_decay=0.01, beta1=0.9) - out.backward() + loss.backward() opt.step() opt.clear_grad() -- GitLab