diff --git a/test/legacy_test/eager_op_test.py b/test/legacy_test/eager_op_test.py
index cdd546021353d595d00102665236e4156d9c3afd..b809d371c255036186c4401211e5833a8ad5f0fb 100644
--- a/test/legacy_test/eager_op_test.py
+++ b/test/legacy_test/eager_op_test.py
@@ -2423,6 +2423,14 @@ class OpTest(unittest.TestCase):
             numeric_grad_delta = 1e-5
             max_relative_error = 1e-7

+        if (
+            self.dtype == np.complex128
+            and self.op_type
+            not in op_threshold_white_list.NEED_FIX_FP64_CHECK_GRAD_THRESHOLD_OP_LIST
+        ):
+            numeric_grad_delta = 1e-5
+            max_relative_error = 1e-6
+
         cache_list = None
         if hasattr(self, "cache_name_list"):
             cache_list = self.cache_name_list
diff --git a/test/legacy_test/test_complex_abs.py b/test/legacy_test/test_complex_abs.py
index 3c59d3e1421cf5f448ae58b1eec457f299ff38ea..11c0fbc2b735e3d09f0cb47aafa524480c13622c 100644
--- a/test/legacy_test/test_complex_abs.py
+++ b/test/legacy_test/test_complex_abs.py
@@ -29,7 +29,6 @@ class TestComplexAbsOp(OpTest):
         self.dtype = np.float64
         self.shape = (2, 3, 4, 5)
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(self.x)}
         self.outputs = {'Out': self.out}
@@ -40,10 +39,6 @@ class TestComplexAbsOp(OpTest):
         ) + 1j * np.random.random(self.shape).astype(self.dtype)
         self.out = np.abs(self.x)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.shape, self.dtype)
-        self.grad_x = self.grad_out * (self.x / np.abs(self.x))
-
     def test_check_output(self):
         self.check_output()

@@ -51,8 +46,6 @@ class TestComplexAbsOp(OpTest):
         self.check_grad(
             ['X'],
             'Out',
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )


@@ -64,7 +57,6 @@ class TestComplexAbsOpZeroValues(OpTest):
         self.dtype = np.float64
         self.shape = (2, 3, 4, 5)
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(self.x)}
         self.outputs = {'Out': self.out}
@@ -75,10 +67,6 @@ class TestComplexAbsOpZeroValues(OpTest):
         ).astype(self.dtype)
         self.out = np.abs(self.x)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.shape, self.dtype)
-        self.grad_x = np.zeros(self.shape, self.dtype)
-
     def test_check_output(self):
         self.check_output()

@@ -86,8 +74,6 @@ class TestComplexAbsOpZeroValues(OpTest):
         self.check_grad(
             ['X'],
             'Out',
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )


@@ -115,7 +101,6 @@ class TestRealAbsOp(OpTest):
         self.dtype = np.float64
         self.shape = (2, 3, 4, 5)
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(self.x)}
         self.outputs = {'Out': self.out}
@@ -124,10 +109,6 @@ class TestRealAbsOp(OpTest):
         self.x = 1 + np.random.random(self.shape).astype(self.dtype)
         self.out = np.abs(self.x)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.shape, self.dtype)
-        self.grad_x = self.grad_out * (self.x / np.abs(self.x))
-
     def test_check_output(self):
         self.check_output()

@@ -135,8 +116,6 @@ class TestRealAbsOp(OpTest):
         self.check_grad(
             ['X'],
             'Out',
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )
diff --git a/test/legacy_test/test_complex_op.py b/test/legacy_test/test_complex_op.py
index c2c48eacd4aebc51bf8a52d2ffb6b13b4af6e7dd..e0e509d6d41de262a0cf5bd63264152357dd3c43 100644
--- a/test/legacy_test/test_complex_op.py
+++ b/test/legacy_test/test_complex_op.py
@@ -28,25 +28,6 @@ def ref_complex(x, y):
     return x + 1j * y


-def ref_complex_grad(x, y, dout):
-    out = x + 1j * y
-    out_rank = out.ndim
-    delta_rank_x = out_rank - x.ndim
-    delta_rank_y = out_rank - y.ndim
-
-    dx_reduce_axes = []
-    dy_reduce_axes = []
-
-    for i in range(out_rank):
-        if i < delta_rank_x or dout.shape[i] > x.shape[i - delta_rank_x]:
-            dx_reduce_axes.append(i)
-        if i < delta_rank_y or dout.shape[i] > y.shape[i - delta_rank_y]:
-            dy_reduce_axes.append(i)
-    dx = np.sum(dout.real, axis=tuple(dx_reduce_axes)).reshape(x.shape)
-    dy = np.sum(dout.imag, axis=tuple(dy_reduce_axes)).reshape(y.shape)
-    return (dx, dy)
-
-
 class TestComplexOp(OpTest):
     def init_spec(self):
         self.x_shape = [10, 10]
@@ -60,9 +41,6 @@ class TestComplexOp(OpTest):
         x = np.random.randn(*self.x_shape).astype(self.dtype)
         y = np.random.randn(*self.y_shape).astype(self.dtype)
         out_ref = ref_complex(x, y)
-        self.out_grad = np.random.randn(*self.x_shape).astype(
-            self.dtype
-        ) + 1j * np.random.randn(*self.y_shape).astype(self.dtype)
         self.inputs = {'X': x, 'Y': y}
         self.outputs = {'Out': out_ref}

@@ -70,43 +48,23 @@ class TestComplexOp(OpTest):
         self.check_output()

     def test_check_grad(self):
-        dout = self.out_grad
-        dx, dy = ref_complex_grad(
-            self.inputs['X'], self.inputs['Y'], self.out_grad
-        )
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[dx, dy],
-            user_defined_grad_outputs=[dout],
         )

     def test_check_grad_ignore_x(self):
-        dout = self.out_grad
-        dx, dy = ref_complex_grad(
-            self.inputs['X'], self.inputs['Y'], self.out_grad
-        )
-        self.assertTupleEqual(dx.shape, tuple(self.x_shape))
-        self.assertTupleEqual(dy.shape, tuple(self.y_shape))
         self.check_grad(
             ['Y'],
             'Out',
             no_grad_set=set('X'),
-            user_defined_grads=[dy],
-            user_defined_grad_outputs=[dout],
         )

     def test_check_grad_ignore_y(self):
-        dout = self.out_grad
-        dx, dy = ref_complex_grad(
-            self.inputs['X'], self.inputs['Y'], self.out_grad
-        )
         self.check_grad(
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[dx],
-            user_defined_grad_outputs=[dout],
         )
diff --git a/test/legacy_test/test_complex_view_op.py b/test/legacy_test/test_complex_view_op.py
index 816e691ffd2be53cd0018f6973e1b6edad18ee2c..81a5dcba620e915c79e864dff6d327974abd431f 100644
--- a/test/legacy_test/test_complex_view_op.py
+++ b/test/legacy_test/test_complex_view_op.py
@@ -39,9 +39,6 @@ class TestViewAsComplexOp(OpTest):
         self.python_api = paddle.as_complex
         x = np.random.randn(10, 10, 2).astype("float64")
         out_ref = ref_view_as_complex(x)
-        self.out_grad = np.ones([10, 10], dtype="float64") + 1j * np.ones(
-            [10, 10], dtype="float64"
-        )
         self.inputs = {'X': x}
         self.outputs = {'Out': out_ref}

@@ -52,8 +49,6 @@ class TestViewAsComplexOp(OpTest):
         self.check_grad(
             ['X'],
             'Out',
-            user_defined_grads=[ref_view_as_real(self.out_grad)],
-            user_defined_grad_outputs=[self.out_grad],
         )


@@ -67,7 +62,6 @@ class TestViewAsRealOp(OpTest):
         self.inputs = {'X': x}
         self.outputs = {'Out': out_ref}
         self.python_api = paddle.as_real
-        self.out_grad = np.ones([10, 10, 2], dtype="float64")

     def test_check_output(self):
         self.check_output()
@@ -76,8 +70,6 @@ class TestViewAsRealOp(OpTest):
         self.check_grad(
             ['X'],
             'Out',
-            user_defined_grads=[ref_view_as_complex(self.out_grad)],
-            user_defined_grad_outputs=[self.out_grad],
         )
diff --git a/test/legacy_test/test_conj_op.py b/test/legacy_test/test_conj_op.py
index d0bc124352fadf09bdad798b3715f7cfe01f9a60..8e9f90d08516ebae5f77ff8469e694e4c80aa879 100644
--- a/test/legacy_test/test_conj_op.py
+++ b/test/legacy_test/test_conj_op.py
@@ -36,7 +36,6 @@ class TestConjOp(OpTest):
         self.python_api = paddle.tensor.conj
         self.init_dtype_type()
         self.init_input_output()
-        self.init_grad_input_output()

     def init_dtype_type(self):
         self.dtype = np.complex64
@@ -50,12 +49,6 @@ class TestConjOp(OpTest):
         self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
         self.outputs = {'Out': out}

-    def init_grad_input_output(self):
-        self.grad_out = (np.ones((12, 14)) + 1j * np.ones((12, 14))).astype(
-            self.dtype
-        )
-        self.grad_in = np.conj(self.grad_out)
-
     def test_check_output(self):
         self.check_output()

@@ -63,8 +56,6 @@ class TestConjOp(OpTest):
         self.check_grad(
             ['X'],
             'Out',
-            user_defined_grads=[self.grad_in],
-            user_defined_grad_outputs=[self.grad_out],
         )
diff --git a/test/legacy_test/test_dot_op.py b/test/legacy_test/test_dot_op.py
index 9d0e0b1b15d03393e50b0602a469b8f31ecc7242..cf3f9c48013f399bcb7369ee1de042b94d95ffec 100644
--- a/test/legacy_test/test_dot_op.py
+++ b/test/legacy_test/test_dot_op.py
@@ -189,7 +189,6 @@ class TestComplexDotOp(OpTest):
         self.python_api = paddle.dot
         self.init_base_dtype()
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -209,11 +208,6 @@ class TestComplexDotOp(OpTest):
         ) + 1j * np.random.random(100).astype(self.dtype)
         self.out = np.dot(self.x, self.y)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones([], self.dtype) + 1j * np.ones([], self.dtype)
-        self.grad_x = self.grad_out * np.conj(self.y)
-        self.grad_y = self.grad_out * np.conj(self.x)
-
     def test_check_output(self):
         self.check_output()

@@ -221,8 +215,6 @@ class TestComplexDotOp(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_x(self):
@@ -230,8 +222,6 @@ class TestComplexDotOp(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_y(self):
@@ -239,8 +229,6 @@ class TestComplexDotOp(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )


@@ -250,7 +238,6 @@ class TestComplexDotOp2D(OpTest):
         self.python_api = paddle.dot
         self.init_base_dtype()
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -270,17 +257,6 @@ class TestComplexDotOp2D(OpTest):
         ) + 1j * np.random.random((2, 100)).astype(self.dtype)
         self.out = np.diag(np.dot(self.x, self.y.T)).reshape(-1)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones((2), self.dtype) + 1j * np.ones((2), self.dtype)
-        self.grad_x = self._get_grad(self.grad_out, self.y)
-        self.grad_y = self._get_grad(self.grad_out, self.x)
-
-    def _get_grad(self, grad_out, input):
-        grad = np.empty((0, input.shape[1]))
-        for i in range(grad_out.shape[0]):
-            grad = np.append(grad, [grad_out[i] * np.conj(input[i])], axis=0)
-        return grad
-
     def test_check_output(self):
         self.check_output()

@@ -288,8 +264,6 @@ class TestComplexDotOp2D(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_x(self):
@@ -297,8 +271,6 @@ class TestComplexDotOp2D(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_y(self):
@@ -306,8 +278,6 @@ class TestComplexDotOp2D(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )
diff --git a/test/legacy_test/test_elementwise_add_op.py b/test/legacy_test/test_elementwise_add_op.py
index f97d834c6804fcaede57653b018efba83f56d95a..833979c3782b1508c7cb58b794a500c68ef20ae0 100644
--- a/test/legacy_test/test_elementwise_add_op.py
+++ b/test/legacy_test/test_elementwise_add_op.py
@@ -688,7 +688,6 @@ class TestComplexElementwiseAddOp(OpTest):
         self.dtype = np.float64
         self.shape = (2, 3, 4, 5)
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -698,7 +697,7 @@ class TestComplexElementwiseAddOp(OpTest):
         self.outputs = {'Out': self.out}

     def init_base_dtype(self):
-        self.dtype = np.float64
+        self.dtype = np.complex128

     def init_input_output(self):
         self.x = np.random.random(self.shape).astype(
@@ -709,13 +708,6 @@ class TestComplexElementwiseAddOp(OpTest):
         ) + 1j * np.random.random(self.shape).astype(self.dtype)
         self.out = self.x + self.y

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.shape, self.dtype) + 1j * np.ones(
-            self.shape, self.dtype
-        )
-        self.grad_x = self.grad_out
-        self.grad_y = self.grad_out
-
     def test_check_output(self):
         self.check_output()

@@ -723,8 +715,6 @@ class TestComplexElementwiseAddOp(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_x(self):
@@ -732,8 +722,6 @@ class TestComplexElementwiseAddOp(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_y(self):
@@ -741,8 +729,6 @@ class TestComplexElementwiseAddOp(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )


@@ -754,13 +740,6 @@ class TestRealComplexElementwiseAddOp(TestComplexElementwiseAddOp):
         ) + 1j * np.random.random(self.shape).astype(self.dtype)
         self.out = self.x + self.y

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.shape, self.dtype) + 1j * np.ones(
-            self.shape, self.dtype
-        )
-        self.grad_x = np.real(self.grad_out)
-        self.grad_y = self.grad_out
-

 class TestBoolAddFloatElementwiseAddop(unittest.TestCase):
     def test_static_add(self):
diff --git a/test/legacy_test/test_elementwise_div_op.py b/test/legacy_test/test_elementwise_div_op.py
index 2432b3b04e4ab81c685cc61d23e1877bddb7a0fb..eba60625668b2feec93e51d23001a80b9e549d1c 100644
--- a/test/legacy_test/test_elementwise_div_op.py
+++ b/test/legacy_test/test_elementwise_div_op.py
@@ -513,7 +513,6 @@ class TestComplexElementwiseDivOp(OpTest):
         self.python_api = paddle.divide
         self.init_base_dtype()
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -523,7 +522,7 @@ class TestComplexElementwiseDivOp(OpTest):
         self.outputs = {'Out': self.out}

     def init_base_dtype(self):
-        self.dtype = np.float64
+        self.dtype = np.complex128

     def init_input_output(self):
         self.x = np.random.random((2, 3, 4, 5)).astype(
@@ -534,13 +533,6 @@ class TestComplexElementwiseDivOp(OpTest):
         ) + 1j * np.random.random((2, 3, 4, 5)).astype(self.dtype)
         self.out = self.x / self.y

-    def init_grad_input_output(self):
-        self.grad_out = np.ones((2, 3, 4, 5), self.dtype) + 1j * np.ones(
-            (2, 3, 4, 5), self.dtype
-        )
-        self.grad_x = self.grad_out / np.conj(self.y)
-        self.grad_y = -self.grad_out * np.conj(self.x / self.y / self.y)
-
     def test_check_output(self):
         self.check_output()

@@ -548,8 +540,6 @@ class TestComplexElementwiseDivOp(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_x(self):
@@ -557,8 +547,6 @@ class TestComplexElementwiseDivOp(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_y(self):
@@ -566,8 +554,6 @@ class TestComplexElementwiseDivOp(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )
diff --git a/test/legacy_test/test_elementwise_mul_op.py b/test/legacy_test/test_elementwise_mul_op.py
index 0b773108a8c9083f81590ad90832dbc9911a9fe0..8356d055c208cba9757222ff2a69d4009a800c6b 100644
--- a/test/legacy_test/test_elementwise_mul_op.py
+++ b/test/legacy_test/test_elementwise_mul_op.py
@@ -494,7 +494,6 @@ class TestComplexElementwiseMulOp(OpTest):
         self.python_api = paddle.multiply
         self.init_base_dtype()
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -504,7 +503,7 @@ class TestComplexElementwiseMulOp(OpTest):
         self.outputs = {'Out': self.out}

     def init_base_dtype(self):
-        self.dtype = np.float64
+        self.dtype = np.complex128

     def init_input_output(self):
         self.x = np.random.random((2, 3, 4, 5)).astype(
@@ -515,13 +514,6 @@ class TestComplexElementwiseMulOp(OpTest):
         ) + 1j * np.random.random((2, 3, 4, 5)).astype(self.dtype)
         self.out = self.x * self.y

-    def init_grad_input_output(self):
-        self.grad_out = np.ones((2, 3, 4, 5), self.dtype) + 1j * np.ones(
-            (2, 3, 4, 5), self.dtype
-        )
-        self.grad_x = self.grad_out * np.conj(self.y)
-        self.grad_y = self.grad_out * np.conj(self.x)
-
     def test_check_output(self):
         self.check_output()

@@ -529,8 +521,6 @@ class TestComplexElementwiseMulOp(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_x(self):
@@ -538,8 +528,6 @@ class TestComplexElementwiseMulOp(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_y(self):
@@ -547,12 +535,13 @@ class TestComplexElementwiseMulOp(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )


 class TestRealComplexElementwiseMulOp(TestComplexElementwiseMulOp):
+    def init_base_dtype(self):
+        self.dtype = np.complex128
+
     def init_input_output(self):
         self.x = np.random.random((2, 3, 4, 5)).astype(self.dtype)
         self.y = np.random.random((2, 3, 4, 5)).astype(
@@ -560,13 +549,6 @@ class TestRealComplexElementwiseMulOp(TestComplexElementwiseMulOp):
         ) + 1j * np.random.random((2, 3, 4, 5)).astype(self.dtype)
         self.out = self.x * self.y

-    def init_grad_input_output(self):
-        self.grad_out = np.ones((2, 3, 4, 5), self.dtype) + 1j * np.ones(
-            (2, 3, 4, 5), self.dtype
-        )
-        self.grad_x = np.real(self.grad_out * np.conj(self.y))
-        self.grad_y = self.grad_out * np.conj(self.x)
-

 class TestElementwiseMulop(unittest.TestCase):
     def test_dygraph_mul(self):
diff --git a/test/legacy_test/test_elementwise_sub_op.py b/test/legacy_test/test_elementwise_sub_op.py
index 2d7858fa1db5b0dfa8cd6709317e2a21e7383ffc..0690d60b56890f2bf2cc232e382e378823e30fc8 100644
--- a/test/legacy_test/test_elementwise_sub_op.py
+++ b/test/legacy_test/test_elementwise_sub_op.py
@@ -787,7 +787,6 @@ class TestComplexElementwiseSubOp(OpTest):
         self.dtype = np.float64
         self.shape = (2, 3, 4, 5)
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -810,13 +809,6 @@ class TestComplexElementwiseSubOp(OpTest):
         ) + 1j * np.random.random(self.shape).astype(self.dtype)
         self.out = self.x - self.y

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.shape, self.dtype) + 1j * np.ones(
-            self.shape, self.dtype
-        )
-        self.grad_x = self.grad_out
-        self.grad_y = -self.grad_out
-
     def test_check_output(self):
         self.check_output()

@@ -824,8 +816,6 @@ class TestComplexElementwiseSubOp(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
             check_prim=self.check_prim,
         )

@@ -834,8 +824,6 @@ class TestComplexElementwiseSubOp(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
             check_prim=self.check_prim,
         )

@@ -844,8 +832,6 @@ class TestComplexElementwiseSubOp(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
             check_prim=self.check_prim,
         )

@@ -853,7 +839,7 @@ class TestComplexElementwiseSubOp(OpTest):
         self.enable_cinn = False

     def if_check_prim(self):
-        self.check_prim = True
+        self.check_prim = False


 class TestRealComplexElementwiseSubOp(TestComplexElementwiseSubOp):
@@ -864,13 +850,6 @@ class TestRealComplexElementwiseSubOp(TestComplexElementwiseSubOp):
         ) + 1j * np.random.random(self.shape).astype(self.dtype)
         self.out = self.x - self.y

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.shape, self.dtype) + 1j * np.ones(
-            self.shape, self.dtype
-        )
-        self.grad_x = np.real(self.grad_out)
-        self.grad_y = -self.grad_out
-
     def if_enable_cinn(self):
         self.enable_cinn = False
diff --git a/test/legacy_test/test_kron_op.py b/test/legacy_test/test_kron_op.py
index 5bcdcc048689aaba1faf9e08cb9a1161b7b59600..735e8bd1e3203028e723110268edc5a5e84970ef 100644
--- a/test/legacy_test/test_kron_op.py
+++ b/test/legacy_test/test_kron_op.py
@@ -158,7 +158,6 @@ class TestComplexKronOp(OpTest):
         self.out_shape = self.x_shape * self.y_shape
         self.init_base_dtype()
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -168,7 +167,7 @@ class TestComplexKronOp(OpTest):
         self.outputs = {'Out': self.out}

     def init_base_dtype(self):
-        self.dtype = np.float64
+        self.dtype = np.complex128

     def init_input_output(self):
         self.x = np.random.random(self.x_shape).astype(
@@ -179,39 +178,6 @@ class TestComplexKronOp(OpTest):
         ) + 1j * np.random.random(self.y_shape).astype(self.dtype)
         self.out = np.kron(self.x, self.y)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.out_shape, self.dtype) + 1j * np.ones(
-            self.out_shape, self.dtype
-        )
-        self.grad_x = self.get_grad_x_by_numpy()
-        self.grad_y = self.get_grad_y_by_numpy()
-
-    def get_grad_x_by_numpy(self):
-        grad_x = np.zeros(self.x_shape, np.complex128)
-        for x_i in range(self.x_shape[0]):
-            for x_j in range(self.x_shape[1]):
-                for i in range(self.y_shape[0]):
-                    for j in range(self.y_shape[1]):
-                        idx_i = x_i * self.y_shape[0] + i
-                        idx_j = x_j * self.y_shape[1] + j
-                        grad_x[x_i][x_j] += self.grad_out[idx_i][
-                            idx_j
-                        ] * np.conj(self.y[i][j])
-        return grad_x
-
-    def get_grad_y_by_numpy(self):
-        grad_y = np.zeros(self.y_shape, np.complex128)
-        for y_i in range(self.y_shape[0]):
-            for y_j in range(self.y_shape[1]):
-                for x_i in range(self.x_shape[0]):
-                    for x_j in range(self.x_shape[1]):
-                        idx_i = x_i * self.y_shape[0] + y_i
-                        idx_j = x_j * self.y_shape[1] + y_j
-                        grad_y[y_i][y_j] += self.grad_out[idx_i][
-                            idx_j
-                        ] * np.conj(self.x[x_i][x_j])
-        return grad_y
-
     def test_check_output(self):
         self.check_output()

@@ -219,8 +185,6 @@ class TestComplexKronOp(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_x(self):
@@ -228,8 +192,6 @@ class TestComplexKronOp(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
         )

     def test_check_grad_ingore_y(self):
@@ -237,8 +199,6 @@ class TestComplexKronOp(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
         )


@@ -250,13 +210,6 @@ class TestKronOpTypePromotion(TestComplexKronOp):
         ) + 1j * np.random.random(self.y_shape).astype(self.dtype)
         self.out = np.kron(self.x, self.y)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones(self.out_shape, self.dtype) + 1j * np.ones(
-            self.out_shape, self.dtype
-        )
-        self.grad_x = self.get_grad_x_by_numpy().real
-        self.grad_y = self.get_grad_y_by_numpy()
-

 if __name__ == '__main__':
     paddle.enable_static()
diff --git a/test/legacy_test/test_matmul_v2_op.py b/test/legacy_test/test_matmul_v2_op.py
index 957b03549ef3e0093439c1107507ce3cfd3cb423..f7b83fce17787c899816d830070bd1f35afc4ad9 100644
--- a/test/legacy_test/test_matmul_v2_op.py
+++ b/test/legacy_test/test_matmul_v2_op.py
@@ -598,7 +598,6 @@ class TestComplexMatMulOp(OpTest):
         self.python_api = paddle.tensor.matmul
         self.init_base_dtype()
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -608,7 +607,7 @@ class TestComplexMatMulOp(OpTest):
         self.outputs = {'Out': self.out}

     def init_base_dtype(self):
-        self.dtype = np.float64
+        self.dtype = np.complex128

     def init_input_output(self):
         self.x = np.random.random((10, 10)).astype(
@@ -619,13 +618,6 @@ class TestComplexMatMulOp(OpTest):
         ) + 1j * np.random.random((10, 10)).astype(self.dtype)
         self.out = np.dot(self.x, self.y)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones((10, 10), self.dtype) + 1j * np.ones(
-            (10, 10), self.dtype
-        )
-        self.grad_x = np.matmul(self.grad_out, np.conj(self.y).T)
-        self.grad_y = np.matmul(np.conj(self.x).T, self.grad_out)
-
     def test_check_output(self):
         self.check_output(check_cinn=False)

@@ -633,8 +625,6 @@ class TestComplexMatMulOp(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
             check_cinn=False,
         )

@@ -643,8 +633,6 @@ class TestComplexMatMulOp(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
             check_cinn=False,
         )

@@ -653,8 +641,6 @@ class TestComplexMatMulOp(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
             check_cinn=False,
         )


@@ -665,7 +651,6 @@ class TestComplexMatMulOpBroadcast(OpTest):
         self.python_api = paddle.tensor.matmul
         self.init_base_dtype()
         self.init_input_output()
-        self.init_grad_input_output()

         self.inputs = {
             'X': OpTest.np_dtype_to_fluid_dtype(self.x),
@@ -675,7 +660,7 @@ class TestComplexMatMulOpBroadcast(OpTest):
         self.outputs = {'Out': self.out}

     def init_base_dtype(self):
-        self.dtype = np.float64
+        self.dtype = np.complex128

     def init_input_output(self):
         self.x = np.random.random((10, 2, 5)).astype(
@@ -686,15 +671,6 @@ class TestComplexMatMulOpBroadcast(OpTest):
         ) + 1j * np.random.random((5, 20)).astype(self.dtype)
         self.out = np.dot(self.x, self.y)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones((10, 2, 20), self.dtype) + 1j * np.ones(
-            (10, 2, 20), self.dtype
-        )
-        self.grad_x = np.matmul(self.grad_out, np.conj(self.y).T)
-        self.grad_y = np.sum(
-            np.matmul(np.conj(self.x).transpose(0, 2, 1), self.grad_out), axis=0
-        )
-
     def test_check_output(self):
         self.check_output(check_cinn=False)

@@ -702,8 +678,6 @@ class TestComplexMatMulOpBroadcast(OpTest):
         self.check_grad(
             ['X', 'Y'],
             'Out',
-            user_defined_grads=[self.grad_x, self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
             check_cinn=False,
         )

@@ -712,8 +686,6 @@ class TestComplexMatMulOpBroadcast(OpTest):
             ['Y'],
             'Out',
             no_grad_set=set("X"),
-            user_defined_grads=[self.grad_y],
-            user_defined_grad_outputs=[self.grad_out],
             check_cinn=False,
         )

@@ -722,8 +694,6 @@ class TestComplexMatMulOpBroadcast(OpTest):
             ['X'],
             'Out',
             no_grad_set=set('Y'),
-            user_defined_grads=[self.grad_x],
-            user_defined_grad_outputs=[self.grad_out],
             check_cinn=False,
         )


@@ -736,13 +706,6 @@ class TestMatMulTypePromotion(TestComplexMatMulOp):
         ) + 1j * np.random.random((10, 10)).astype(self.dtype)
         self.out = np.dot(self.x, self.y)

-    def init_grad_input_output(self):
-        self.grad_out = np.ones((10, 10), self.dtype) + 1j * np.ones(
-            (10, 10), self.dtype
-        )
-        self.grad_x = np.matmul(self.grad_out, np.conj(self.y).T).real
-        self.grad_y = np.matmul(np.conj(self.x).T, self.grad_out)
-

 class TestMatmulop(unittest.TestCase):
     def func_dygraph_matmul(self):
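
Note on the deleted helpers: every init_grad_input_output removed above encodes the same conjugate-gradient convention (e.g. grad_x = grad_out * conj(y) for dot), which the patch now leaves to OpTest.check_grad's delta-based numeric gradient. A minimal self-contained NumPy sketch of that convention and the finite-difference check it replaces; the variable names here are illustrative only, not Paddle API:

    import numpy as np

    # Analytic gradients in the convention used by the deleted helpers:
    # for out = dot(x, y): grad_x = grad_out * conj(y), grad_y = grad_out * conj(x).
    rng = np.random.default_rng(0)
    x = rng.random(100) + 1j * rng.random(100)
    y = rng.random(100) + 1j * rng.random(100)
    grad_out = np.ones([], np.complex128) + 1j * np.ones([], np.complex128)
    grad_x = grad_out * np.conj(y)
    grad_y = grad_out * np.conj(x)

    # Numeric cross-check of one coordinate: perturb Re(x[0]) by delta and
    # compare the change in Re(out) against Re(conj(y)[0]), i.e. the real part
    # of grad_x[0] when grad_out is 1. This mirrors (in spirit) the delta-based
    # numeric gradient that check_grad computes with numeric_grad_delta = 1e-5.
    delta = 1e-5
    e0 = np.zeros_like(x)
    e0[0] = delta
    num = (np.dot(x + e0, y).real - np.dot(x, y).real) / delta
    assert np.isclose(num, np.conj(y)[0].real, atol=1e-6)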