From d9d35927659f4d19b6a3c63e13db072ba0535d64 Mon Sep 17 00:00:00 2001
From: niuliling123 <51102941+niuliling123@users.noreply.github.com>
Date: Fri, 17 Mar 2023 14:18:50 +0800
Subject: [PATCH] Fix untest error in test_adamax_op (#51692)

---
 python/paddle/fluid/optimizer.py                      | 2 +-
 python/paddle/fluid/tests/unittests/test_adamax_op.py | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/python/paddle/fluid/optimizer.py b/python/paddle/fluid/optimizer.py
index e1051db52b4..6a97ab0cbd8 100755
--- a/python/paddle/fluid/optimizer.py
+++ b/python/paddle/fluid/optimizer.py
@@ -2809,7 +2809,7 @@ class AdamaxOptimizer(Optimizer):
             with param.block.program._optimized_guard(
                 [param, grad]
             ), name_scope('adamx'):
-                beta1_pow_acc = self._get_accumulator(
+                beta1_pow_acc = self._get_accumulator_master(
                     self._beta1_pow_acc_str, param
                 )
                 if in_dygraph_mode():
diff --git a/python/paddle/fluid/tests/unittests/test_adamax_op.py b/python/paddle/fluid/tests/unittests/test_adamax_op.py
index 7d4c2849bdc..8acad2b4bfb 100644
--- a/python/paddle/fluid/tests/unittests/test_adamax_op.py
+++ b/python/paddle/fluid/tests/unittests/test_adamax_op.py
@@ -31,6 +31,7 @@ def adamx_wrapper(
     beta1=0.78,
     beta2=0.899,
     epsilon=1e-5,
+    find_master=False,
 ):
     return paddle._C_ops.adamax_(
         param,
@@ -43,6 +44,7 @@ def adamx_wrapper(
         beta1,
         beta2,
         epsilon,
+        find_master,
     )
--
GitLab
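
Note on the change: the optimizer side swaps _get_accumulator for _get_accumulator_master so that the beta1 power accumulator can be resolved against a parameter's fp32 master copy when mixed-precision training is enabled, and the test wrapper threads a matching find_master flag through to paddle._C_ops.adamax_. Below is a minimal sketch of that master-weight lookup pattern; it is not Paddle's actual implementation, and the class name (OptimizerSketch), the _master_weights mapping, and the "float16" dtype check are all invented for illustration.

    class OptimizerSketch:
        def __init__(self, multi_precision=False):
            self._multi_precision = multi_precision
            # {accumulator_name: {param_name: accumulator value}}
            self._accumulators = {}
            # {fp16 param_name: name of its fp32 master copy}
            self._master_weights = {}

        def _get_accumulator(self, name, param_name):
            # Plain lookup, keyed by the parameter's own name.
            return self._accumulators[name][param_name]

        def _get_accumulator_master(self, name, param_name, param_dtype):
            # With multi-precision on, fp16 parameters keep optimizer
            # state against an fp32 master copy; redirect the lookup to
            # that copy so the state is found under the right key.
            find_master = self._multi_precision and param_dtype == "float16"
            key = self._master_weights[param_name] if find_master else param_name
            return self._get_accumulator(name, key)

    # Usage sketch: state registered under the master-weight name is
    # found even though the caller only holds the fp16 parameter.
    opt = OptimizerSketch(multi_precision=True)
    opt._master_weights["w_fp16"] = "w_fp16.master"
    opt._accumulators["beta1_pow_acc"] = {"w_fp16.master": 0.9}
    print(opt._get_accumulator_master("beta1_pow_acc", "w_fp16", "float16"))  # 0.9

Without the redirect, the lookup would miss (or wrongly hit) state keyed by the fp16 parameter itself, which is presumably the kind of mismatch the one-line fix above addresses.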