diff --git a/python/paddle/fluid/tests/unittests/test_adam_op.py b/python/paddle/fluid/tests/unittests/test_adam_op.py
index 2983e5ca1958ef5b65aa280692017de3cc706802..cd40d401c457f0c5f6d43145600c0bf510fc6ebc 100644
--- a/python/paddle/fluid/tests/unittests/test_adam_op.py
+++ b/python/paddle/fluid/tests/unittests/test_adam_op.py
@@ -15,7 +15,7 @@
 import unittest
 
 import numpy as np
-from op_test import OpTest
+from eager_op_test import OpTest
 
 import paddle
 import paddle.fluid as fluid
@@ -23,10 +23,47 @@ from paddle.fluid import core
 from paddle.fluid.op import Operator
 
 
+def adam_wrapper(
+    param,
+    grad,
+    LearningRate,
+    moment1,
+    moment2,
+    beta1_pow,
+    beta2_pow,
+    master_weight=None,
+    find_inf=None,
+    beta1=0.78,
+    beta2=0.836,
+    epsilon=1e-4,
+    lazy_mode=False,
+):
+    _, _, _, _, _, _ = paddle._C_ops.adam_(
+        param,
+        grad,
+        LearningRate,
+        moment1,
+        moment2,
+        beta1_pow,
+        beta2_pow,
+        master_weight,
+        find_inf,
+        beta1,
+        beta2,
+        epsilon,
+        lazy_mode,
+        1000,
+        False,
+        False,
+    )
+
+
 class TestAdamOp1(OpTest):
     def setUp(self):
         '''Test Adam Op with supplied attributes'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment2 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -73,6 +110,8 @@ class TestAdamOp2(OpTest):
     def setUp(self):
         '''Test Adam Op with supplied attributes'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         self.set_shape()
         param = np.random.uniform(-1, 1, self.shape).astype("float32")
         grad = np.random.uniform(-1, 1, self.shape).astype("float32")
@@ -122,6 +161,8 @@ class TestAdamOpMultipleSteps(OpTest):
     def setUp(self):
         '''Test Adam Operator with supplied attributes'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         self.num_steps = 10
 
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -414,6 +455,8 @@ class TestAdamOpBetaVariable(OpTest):
     def setUp(self):
         '''Test Adam Op with beta as Variable'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -459,6 +502,8 @@ class TestAdamOpBetaEpsilonVariable(OpTest):
     def setUp(self):
         '''Test Adam Op with beta/epsilon as Variable'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -505,6 +550,8 @@ class TestAdamOpWithGlobalBetaPow(OpTest):
     def setUp(self):
         '''Test Adam Op with global_beta_pow'''
         self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -554,6 +601,8 @@ class TestAdamOpWithSkipUpdate(OpTest):
     def setUp(self):
         '''Test Adam Op with global_beta_pow'''
        self.op_type = "adam"
+        self.python_api = adam_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
diff --git a/python/paddle/fluid/tests/unittests/test_adamax_op.py b/python/paddle/fluid/tests/unittests/test_adamax_op.py
index b3e93bdd7e3fef43ffd3285aea281558b7c69ca2..8b352771993d2a9d485ad17bc9421b6488cce66f 100644
--- a/python/paddle/fluid/tests/unittests/test_adamax_op.py
+++ b/python/paddle/fluid/tests/unittests/test_adamax_op.py
@@ -15,7 +15,33 @@
 import unittest
 
 import numpy as np
-from op_test import OpTest
+from eager_op_test import OpTest
+
+import paddle
+
+
+def adamx_wrapper(
+    param,
+    grad,
+    lr,
+    moment,
+    inf_norm,
+    beta1_pow=None,
+    beta1=0.78,
+    beta2=0.899,
+    epsilon=1e-5,
+):
+    return paddle._C_ops.adamax_(
+        param,
+        grad,
+        lr,
+        moment,
+        inf_norm,
+        beta1_pow,
+        beta1,
+        beta2,
+        epsilon,
+    )
 
 import paddle
 
@@ -24,6 +50,8 @@ class TestAdamaxOp1(OpTest):
     def setUp(self):
         '''Test Adamax Operator with supplied attributes'''
         self.op_type = "adamax"
+        self.python_api = adamx_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -66,6 +94,8 @@ class TestAdamaxOp2(OpTest):
 
     def setUp(self):
         self.op_type = "adamax"
+        self.python_api = adamx_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -104,6 +134,8 @@ class TestAdamaxOpMultipleSteps(OpTest):
     def setUp(self):
         '''Test Adamax Operator with supplied attributes'''
         self.op_type = "adamax"
+        self.python_api = adamx_wrapper
+        self.python_out_sig = ['Out']
         self.num_steps = 10
 
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
diff --git a/python/paddle/fluid/tests/unittests/test_adamw_op.py b/python/paddle/fluid/tests/unittests/test_adamw_op.py
index f227affca00e63f5eae0bbef250a5f17d2a321fd..9ab77d7c2a115a8028b46bb5d1c81f78b8ad057a 100644
--- a/python/paddle/fluid/tests/unittests/test_adamw_op.py
+++ b/python/paddle/fluid/tests/unittests/test_adamw_op.py
@@ -17,7 +17,7 @@ import unittest
 from functools import partial
 
 import numpy as np
-from op_test import OpTest
+from eager_op_test import OpTest
 
 import paddle
 import paddle.fluid as fluid
@@ -60,10 +60,53 @@ def adamw_step(inputs, attributes):
     return param_out, moment1_out, moment2_out
 
 
+def adamw_wrapper(
+    param,
+    grad,
+    lr,
+    moment1,
+    moment2,
+    beta1_pow,
+    beta2_pow,
+    master_weight=None,
+    found_inf=None,
+    beta1=0.78,
+    beta2=0.836,
+    epsilon=1e-4,
+    lr_ratio=1.0,
+    weight_decay=0.01,
+    with_decay=True,
+    lazy_mode=False,
+):
+    _, _, _, _, _, _ = paddle._C_ops.adamw_(
+        param,
+        grad,
+        lr,
+        moment1,
+        moment2,
+        beta1_pow,
+        beta2_pow,
+        master_weight,
+        found_inf,
+        beta1,
+        beta2,
+        epsilon,
+        lr_ratio,
+        weight_decay,
+        with_decay,
+        lazy_mode,
+        1000,
+        False,
+        False,
+    )
+
+
 class TestAdamW(OpTest):
     def setUp(self):
         '''Test AdamW Op with supplied attributes'''
         self.op_type = "adamw"
+        self.python_api = adamw_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         grad = np.random.uniform(-1, 1, (102, 105)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (102, 105)).astype("float32")
@@ -118,6 +161,8 @@ class TestAdamW2(OpTest):
     def setUp(self):
         '''Test AdamW Op with supplied attributes'''
         self.op_type = "adamw"
+        self.python_api = adamw_wrapper
+        self.python_out_sig = ['Out']
         param = np.random.uniform(-1, 1, (2, 2)).astype("float32")
         grad = np.random.uniform(-1, 1, (2, 2)).astype("float32")
         moment1 = np.random.uniform(-1, 1, (2, 2)).astype("float32")
diff --git a/python/paddle/fluid/tests/unittests/test_lamb_op.py b/python/paddle/fluid/tests/unittests/test_lamb_op.py
index b4a80021343dd576e4475d59332d051a8a1e4cb9..caa58df7169dac2de80ea68664a4d191b451ffec 100644
--- a/python/paddle/fluid/tests/unittests/test_lamb_op.py
+++ b/python/paddle/fluid/tests/unittests/test_lamb_op.py
@@ -15,7 +15,7 @@
 import unittest
 
 import numpy as np
-from op_test import OpTest
+from eager_op_test import OpTest
 
 import paddle
 from paddle.fluid import core
@@ -32,6 +32,8 @@ def lamb_wrapper(
     moment2,
     beta1Pow,
     beta2Pow,
+    master_weight=None,
+    found_inf=None,
     epsilon=1e-8,
     beta1=0.9,
     beta2=0.999,
@@ -45,8 +47,8 @@ def lamb_wrapper(
         moment2,
         beta1Pow,
         beta2Pow,
-        None,
-        None,
+        master_weight,
+        found_inf,
         weight_decay,
         beta1,
         beta2,