Unverified commit 56dc8c79, authored by Zhanlue Yang and committed by GitHub


Enabled eager_mode for complex unit tests, except for test_complex_op.py and test_complex_view_op.py (#40887)
Parent 8fe8039e
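Every file touched by this commit follows the same two-part pattern: OpTest-based checks gain a check_eager=True argument (paired with setting self.python_api), and imperative dygraph tests are re-run inside _test_eager_guard() from a new test_eager method. Below is a minimal, hypothetical sketch of that pattern; the class names and sample data are illustrative and not part of the commit, and it assumes Paddle's unit-test environment where op_test.OpTest is importable.

import unittest

import numpy as np
import paddle
# Assumption: op_test is importable, as it is inside Paddle's unit-test directory.
from op_test import OpTest
from paddle.fluid.framework import _test_eager_guard


class TestAbsOpEagerSketch(OpTest):
    """Hypothetical OpTest showing the check_eager=True pattern added by this commit."""

    def setUp(self):
        paddle.enable_static()
        self.op_type = "abs"
        # check_eager re-runs the op through its Python API, so python_api is set here.
        self.python_api = paddle.abs
        x = np.random.random((2, 3)).astype(np.float64) + 1.0
        self.inputs = {'X': x}
        self.outputs = {'Out': np.abs(x)}

    def test_check_output(self):
        # Checks the op under the legacy dygraph path and, additionally, eager mode.
        self.check_output(check_eager=True)


class TestAbsEagerGuardSketch(unittest.TestCase):
    """Hypothetical imperative test showing the _test_eager_guard pattern."""

    def calc_and_check(self):
        paddle.disable_static()
        x = np.random.randn(8).astype(np.float64)
        y = paddle.abs(paddle.to_tensor(x))
        np.testing.assert_allclose(y.numpy(), np.abs(x))

    def test_eager(self):
        # Repeats the imperative check with the final-state (eager) dygraph mode enabled.
        with _test_eager_guard():
            self.calc_and_check()


if __name__ == '__main__':
    unittest.main()

The hunks below apply this pattern, in whole or in part, to each complex-number test file; where check_eager=True is added, the same hunks also set python_api, which the eager check relies on.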
@@ -20,11 +20,13 @@ import numpy as np
import paddle
import paddle.fluid.dygraph as dg
from op_test import OpTest
from paddle.fluid.framework import _test_eager_guard
class TestComplexAbsOp(OpTest):
def setUp(self):
paddle.enable_static()
self.python_api = paddle.abs
self.op_type = "abs"
self.dtype = np.float64
self.shape = (2, 3, 4, 5)
@@ -44,20 +46,22 @@ class TestComplexAbsOp(OpTest):
self.grad_x = self.grad_out * (self.x / np.abs(self.x))
def test_check_output(self):
self.check_output()
self.check_output(check_eager=True)
def test_check_grad(self):
self.check_grad(
['X'],
'Out',
user_defined_grads=[self.grad_x],
user_defined_grad_outputs=[self.grad_out])
user_defined_grad_outputs=[self.grad_out],
check_eager=True)
class TestComplexAbsOpZeroValues(OpTest):
def setUp(self):
paddle.enable_static()
self.op_type = "abs"
self.python_api = paddle.abs
self.dtype = np.float64
self.shape = (2, 3, 4, 5)
self.init_input_output()
@@ -76,14 +80,15 @@ class TestComplexAbsOpZeroValues(OpTest):
self.grad_x = np.zeros(self.shape, self.dtype)
def test_check_output(self):
self.check_output()
self.check_output(check_eager=True)
def test_check_grad(self):
self.check_grad(
['X'],
'Out',
user_defined_grads=[self.grad_x],
user_defined_grad_outputs=[self.grad_out])
user_defined_grad_outputs=[self.grad_out],
check_eager=True)
class TestAbs(unittest.TestCase):
@@ -101,10 +106,15 @@ class TestAbs(unittest.TestCase):
y = paddle.abs(paddle.to_tensor(x))
self.assertTrue(np.allclose(np.abs(x), y.numpy()))
def test_eager(self):
with _test_eager_guard():
self.test_all_positive()
class TestRealAbsOp(OpTest):
def setUp(self):
paddle.enable_static()
self.python_api = paddle.abs
self.op_type = "abs"
self.dtype = np.float64
self.shape = (2, 3, 4, 5)
@@ -123,14 +133,15 @@ class TestRealAbsOp(OpTest):
self.grad_x = self.grad_out * (self.x / np.abs(self.x))
def test_check_output(self):
self.check_output()
self.check_output(check_eager=True)
def test_check_grad(self):
self.check_grad(
['X'],
'Out',
user_defined_grads=[self.grad_x],
user_defined_grad_outputs=[self.grad_out])
user_defined_grad_outputs=[self.grad_out],
check_eager=True)
if __name__ == '__main__':
......
@@ -18,6 +18,7 @@ import unittest
import numpy as np
import paddle
from paddle.fluid.framework import _test_eager_guard
class TestComplexCastOp(unittest.TestCase):
@@ -68,6 +69,12 @@ class TestComplexCastOp(unittest.TestCase):
self.assertTrue(
np.allclose(c_128.cast('complex128').numpy(), c_64.numpy()))
def test_eager(self):
with _test_eager_guard():
self.test_complex64_complex128()
self.test_real_to_complex()
self.test_complex_to_real()
if __name__ == '__main__':
unittest.main()
@@ -19,6 +19,7 @@ from numpy.random import random as rand
import paddle
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
from paddle.fluid.framework import _test_eager_guard
paddle_apis = {
"add": paddle.add,
@@ -98,6 +99,12 @@ class TestComplexElementwiseLayers(unittest.TestCase):
self.compare_by_basic_api(x, y)
self.compare_op_by_basic_api(x, y)
def test_eager(self):
with _test_eager_guard():
self.test_real_x_complex_y()
self.test_complex_x_real_y()
self.test_complex_xy()
if __name__ == '__main__':
unittest.main()
@@ -17,6 +17,7 @@ import paddle
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
from paddle.fluid.framework import _test_eager_guard
class TestComplexGetitemLayer(unittest.TestCase):
@@ -93,6 +94,15 @@ class TestComplexGetitemLayer(unittest.TestCase):
np.testing.assert_allclose(x_var_slice.numpy(), x_np_slice)
def test_eager(self):
with _test_eager_guard():
self.test_case1()
self.test_case2()
self.test_case3()
self.test_case4()
self.test_case5()
self.test_case6()
if __name__ == '__main__':
unittest.main()
@@ -20,6 +20,7 @@ import numpy as np
import paddle
import paddle.fluid.core as core
from paddle.fluid.framework import _test_eager_guard
class Optimization_ex1(paddle.nn.Layer):
@@ -119,6 +120,12 @@ class TestComplexGradAccumulated(unittest.TestCase):
self.train(dev, dtype, 3)
self.train_no_clear_grad(dev, dtype, 3)
def test_eager(self):
with _test_eager_guard():
self.test_case_one_step()
self.test_case_two_step()
self.test_case_non_param()
if __name__ == '__main__':
unittest.main()
@@ -17,6 +17,7 @@ import paddle
import paddle.fluid.dygraph as dg
import numpy as np
import unittest
from paddle.fluid.framework import _test_eager_guard
class ComplexKronTestCase(unittest.TestCase):
@@ -34,6 +35,7 @@ class ComplexKronTestCase(unittest.TestCase):
def runTest(self):
for place in self._places:
self.test_kron_api(place)
self.test_eager(place)
def test_kron_api(self, place):
with dg.guard(place):
@@ -42,6 +44,10 @@ class ComplexKronTestCase(unittest.TestCase):
out_var = paddle.kron(x_var, y_var)
self.assertTrue(np.allclose(out_var.numpy(), self.ref_result))
def test_eager(self, place):
with _test_eager_guard():
self.test_kron_api(place)
def load_tests(loader, standard_tests, pattern):
suite = unittest.TestSuite()
......
@@ -17,6 +17,7 @@ import paddle
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
from paddle.fluid.framework import _test_eager_guard
class TestComplexMatMulLayer(unittest.TestCase):
@@ -121,6 +122,14 @@ class TestComplexMatMulLayer(unittest.TestCase):
self.compare_by_basic_api(x, y, np_result)
self.compare_op_by_basic_api(x, y, np_result)
def test_eager(self):
with _test_eager_guard():
self.test_complex_xy_gemm()
self.test_complex_xy_gemv()
self.test_real_x_complex_y()
self.test_complex_x_real_y()
self.test_complex_xy()
if __name__ == '__main__':
unittest.main()
@@ -21,6 +21,8 @@ from op_test import OpTest
import paddle
from paddle.fluid import dygraph
from paddle import static
from paddle.fluid.framework import _test_eager_guard
paddle.enable_static()
@@ -65,7 +67,7 @@ class TestComplexOp(OpTest):
self.outputs = {'Out': out_ref}
def test_check_output(self):
self.check_output()
self.check_output(check_eager=True)
def test_check_grad(self):
dout = self.out_grad
@@ -75,7 +77,8 @@ class TestComplexOp(OpTest):
['X', 'Y'],
'Out',
user_defined_grads=[dx, dy],
user_defined_grad_outputs=[dout])
user_defined_grad_outputs=[dout],
check_eager=True)
def test_check_grad_ignore_x(self):
dout = self.out_grad
@@ -88,7 +91,8 @@ class TestComplexOp(OpTest):
'Out',
no_grad_set=set('X'),
user_defined_grads=[dy],
user_defined_grad_outputs=[dout])
user_defined_grad_outputs=[dout],
check_eager=True)
def test_check_grad_ignore_y(self):
dout = self.out_grad
@@ -99,7 +103,8 @@ class TestComplexOp(OpTest):
'Out',
no_grad_set=set('Y'),
user_defined_grads=[dx],
user_defined_grad_outputs=[dout])
user_defined_grad_outputs=[dout],
check_eager=True)
class TestComplexOpBroadcast1(TestComplexOp):
@@ -151,6 +156,10 @@ class TestComplexAPI(unittest.TestCase):
fetch_list=[out])
self.assertTrue(np.allclose(self.out, out_np))
def test_eager(self):
with _test_eager_guard():
self.test_dygraph()
if __name__ == "__main__":
unittest.main()
@@ -17,6 +17,7 @@ import paddle
import paddle.fluid.dygraph as dg
import numpy as np
import unittest
from paddle.fluid.framework import _test_eager_guard
class TestComplexReshape(unittest.TestCase):
@@ -53,6 +54,11 @@ class TestComplexReshape(unittest.TestCase):
y_np = y_var.numpy()
self.assertTrue(np.allclose(np.reshape(x_np, shape_), y_np))
def test_eager(self):
with _test_eager_guard():
self.test_shape_norm_dims()
self.test_shape_omit_dims()
if __name__ == "__main__":
unittest.main()
@@ -20,6 +20,7 @@ import numpy as np
import paddle
import paddle.fluid.core as core
from paddle.fluid.framework import _test_eager_guard
class Optimization_ex1(paddle.nn.Layer):
@@ -67,6 +68,10 @@ class TestComplexSimpleNet(unittest.TestCase):
for dev in self.devices:
self.train(dev)
def test_eager(self):
with _test_eager_guard():
self.test_train_success()
if __name__ == '__main__':
unittest.main()
@@ -19,6 +19,7 @@ from numpy.random import random as rand
from paddle import tensor
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
from paddle.fluid.framework import _test_eager_guard
class TestComplexSumLayer(unittest.TestCase):
@@ -39,6 +40,10 @@ class TestComplexSumLayer(unittest.TestCase):
target = np.sum(input, axis=(1, 2))
self.assertTrue(np.allclose(result, target))
def test_eager(self):
with _test_eager_guard():
self.test_complex_basic_api()
if __name__ == '__main__':
unittest.main()
@@ -19,6 +19,7 @@ from numpy.random import random as rand
from paddle import tensor
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
from paddle.fluid.framework import _test_eager_guard
class TestComplexTraceLayer(unittest.TestCase):
@@ -40,6 +41,10 @@ class TestComplexTraceLayer(unittest.TestCase):
target = np.trace(input, offset=1, axis1=0, axis2=2)
self.assertTrue(np.allclose(result, target))
def test_eager(self):
with _test_eager_guard():
self.test_basic_api()
if __name__ == '__main__':
unittest.main()
@@ -17,6 +17,7 @@ import paddle
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
from paddle.fluid.framework import _test_eager_guard
class TestComplexTransposeLayer(unittest.TestCase):
@@ -39,6 +40,10 @@ class TestComplexTransposeLayer(unittest.TestCase):
trans = paddle.transpose(var, perm=perm)
self.assertTrue(np.allclose(trans.numpy(), np_trans))
def test_eager(self):
with _test_eager_guard():
self.test_transpose_by_complex_api()
if __name__ == '__main__':
unittest.main()
@@ -19,6 +19,7 @@ import paddle.fluid.dygraph as dg
import paddle.fluid.core as core
from paddle.fluid.framework import convert_np_dtype_to_dtype_
from paddle.fluid.data_feeder import convert_dtype
from paddle.fluid.framework import _test_eager_guard
class TestComplexVariable(unittest.TestCase):
@@ -57,6 +58,12 @@ class TestComplexVariable(unittest.TestCase):
self.assertEqual(
convert_dtype(core.VarDesc.VarType.COMPLEX128), "complex128")
def test_eager(self):
with _test_eager_guard():
self.test_attrs()
self.test_convert_np_dtype_to_dtype()
self.test_convert_dtype()
if __name__ == '__main__':
unittest.main()
@@ -21,6 +21,7 @@ from op_test import OpTest
import paddle
from paddle.fluid import dygraph
from paddle import static
from paddle.fluid.framework import _test_eager_guard
paddle.enable_static()
@@ -45,14 +46,15 @@ class TestViewAsComplexOp(OpTest):
self.outputs = {'Out': out_ref}
def test_check_output(self):
self.check_output()
self.check_output(check_eager=True)
def test_check_grad(self):
self.check_grad(
['X'],
'Out',
user_defined_grads=[ref_view_as_real(self.out_grad)],
user_defined_grad_outputs=[self.out_grad])
user_defined_grad_outputs=[self.out_grad],
check_eager=True)
class TestViewAsRealOp(OpTest):
@@ -67,14 +69,15 @@ class TestViewAsRealOp(OpTest):
self.out_grad = np.ones([10, 10, 2], dtype="float64")
def test_check_output(self):
self.check_output()
self.check_output(check_eager=True)
def test_check_grad(self):
self.check_grad(
['X'],
'Out',
user_defined_grads=[ref_view_as_complex(self.out_grad)],
user_defined_grad_outputs=[self.out_grad])
user_defined_grad_outputs=[self.out_grad],
check_eager=True)
class TestViewAsComplexAPI(unittest.TestCase):
@@ -99,6 +102,10 @@ class TestViewAsComplexAPI(unittest.TestCase):
[out_np] = exe.run(mp, feed={"x": self.x}, fetch_list=[out])
self.assertTrue(np.allclose(self.out, out_np))
def test_eager(self):
with _test_eager_guard():
self.test_dygraph()
class TestViewAsRealAPI(unittest.TestCase):
def setUp(self):
@@ -122,6 +129,10 @@ class TestViewAsRealAPI(unittest.TestCase):
[out_np] = exe.run(mp, feed={"x": self.x}, fetch_list=[out])
self.assertTrue(np.allclose(self.out, out_np))
def test_eager(self):
with _test_eager_guard():
self.test_dygraph()
if __name__ == "__main__":
unittest.main()