Unverified commit f0046889 authored by Yibing Liu, committed by GitHub

Add complex layer for transpose & matmul, test=develop (#24195)

* Add complex layer for transpose & matmul, test=develop

* Tiny fixes in doc, test=develop

* Fix docs, test=develop
Parent e1a7a880
@@ -12,10 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from . import linalg
from . import math
from . import manipulation
from .linalg import *
from .math import *
from .manipulation import *
__all__ = math.__all__ + []
__all__ = math.__all__
__all__ += linalg.__all__
__all__ += manipulation.__all__
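With this re-export in place, both new layers are reachable directly from the package namespace. A minimal dygraph sketch (assuming a Paddle build of this era that ships the paddle.complex package):

import numpy as np
import paddle
import paddle.fluid.dygraph as dg

with dg.guard():
    x = dg.to_variable(np.array([[1.0 + 1j, 2.0 + 1j]]))
    # transpose and matmul are re-exported by the updated __init__.py.
    y = paddle.complex.transpose(x, [1, 0])
    z = paddle.complex.matmul(x, y)
    print(z.numpy())  # expected [[3.+6.j]]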
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..helper import is_complex, is_real, complex_variable_exists
from ...fluid.framework import ComplexVariable
from ...fluid import layers
__all__ = ['matmul', ]
def matmul(x, y, transpose_x=False, transpose_y=False, alpha=1.0, name=None):
"""
Applies matrix multiplication to two complex number tensors. See the
detailed description in :ref:`api_fluid_layers_matmul`.
Args:
x (ComplexVariable|Variable): The first input, can be a ComplexVariable
with data type complex64 or complex128, or a Variable with data type
float32 or float64.
y (ComplexVariable|Variable): The second input, can be a ComplexVariable
with data type complex64 or complex128, or a Variable with data type
float32 or float64.
transpose_x (bool): Whether to transpose :math:`x` before multiplication.
transpose_y (bool): Whether to transpose :math:`y` before multiplication.
alpha (float): The scale of output. Default 1.0.
name (str|None): A name for this layer (optional). If set None, the layer
will be named automatically.
Returns:
ComplexVariable: The product result, with the same data type as inputs.
Examples:
.. code-block:: python
import numpy as np
import paddle
import paddle.fluid.dygraph as dg
with dg.guard():
x = np.array([[1.0 + 1j, 2.0 + 1j], [3.0+1j, 4.0+1j]])
y = np.array([1.0 + 1j, 1.0 + 1j])
x_var = dg.to_variable(x)
y_var = dg.to_variable(y)
result = paddle.complex.matmul(x_var, y_var)
print(result.numpy())
# [1.+5.j 5.+9.j]
"""
# x = a + bi, y = c + di
# mm(x, y) = mm(a, c) - mm(b, d) + (mm(a, d) + mm(b, c))i
complex_variable_exists([x, y], "matmul")
a, b = (x.real, x.imag) if is_complex(x) else (x, None)
c, d = (y.real, y.imag) if is_complex(y) else (y, None)
ac = layers.matmul(a, c, transpose_x, transpose_y, alpha, name)
if is_real(b) and is_real(d):
bd = layers.matmul(b, d, transpose_x, transpose_y, alpha, name)
real = ac - bd
imag = layers.matmul(a, d, transpose_x, transpose_y, alpha, name) + \
layers.matmul(b, c, transpose_x, transpose_y, alpha, name)
elif is_real(b):
real = ac
imag = layers.matmul(b, c, transpose_x, transpose_y, alpha, name)
else:
real = ac
imag = layers.matmul(a, d, transpose_x, transpose_y, alpha, name)
return ComplexVariable(real, imag)
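The branches above expand the complex product into real-valued matmuls using the identity noted in the comment. A NumPy-only sketch of that identity, independent of Paddle, as a quick sanity check:

# Sanity check of the expansion used above:
# (a + bi) @ (c + di) = (a@c - b@d) + (a@d + b@c)i
import numpy as np

a, b = np.random.random((4, 5)), np.random.random((4, 5))
c, d = np.random.random((5, 3)), np.random.random((5, 3))

direct = (a + 1j * b) @ (c + 1j * d)
expanded = (a @ c - b @ d) + 1j * (a @ d + b @ c)
assert np.allclose(direct, expanded)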
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,7 +17,10 @@ from ..helper import is_complex, is_real, complex_variable_exists
from ...fluid.framework import ComplexVariable
from ...fluid import layers
__all__ = ['reshape', ]
__all__ = [
'reshape',
'transpose',
]
def reshape(x, shape, inplace=False, name=None):
@@ -71,7 +74,7 @@ def reshape(x, shape, inplace=False, name=None):
refer to :ref:`api_guide_Name` .
Returns:
Variable: A ``Tensor`` or ``LoDTensor``. The data type is same as ``x``. It is a new ComplexVariable if ``inplace`` is ``False``, otherwise it is ``x``.
ComplexVariable: A ``Tensor`` or ``LoDTensor``. The data type is same as ``x``. It is a new ComplexVariable if ``inplace`` is ``False``, otherwise it is ``x``.
Raises:
ValueError: If more than one elements of ``shape`` is -1.
@@ -95,7 +98,7 @@ def reshape(x, shape, inplace=False, name=None):
y_np = y_var.numpy()
print(y_np.shape)
# (2, 12)
"""
"""
complex_variable_exists([x], "reshape")
if inplace:
x.real = fluid.layers.reshape(x.real, shape, inplace=inplace, name=name)
@@ -104,3 +107,39 @@ def reshape(x, shape, inplace=False, name=None):
out_real = fluid.layers.reshape(x.real, shape, inplace=inplace, name=name)
out_imag = fluid.layers.reshape(x.imag, shape, inplace=inplace, name=name)
return ComplexVariable(out_real, out_imag)
def transpose(x, perm, name=None):
"""
Permute the data dimensions of the complex number tensor :attr:`x` according to :attr:`perm`.
See :ref:`api_fluid_layers_transpose` for the real number API.
Args:
x (ComplexVariable): The input n-D ComplexVariable with data type
complex64 or complex128.
perm (list): The permutation of dimensions to apply to the input.
name (str): The name of this layer. It is optional.
Returns:
ComplexVariable: A transposed n-D ComplexVariable, with the same data type as :attr:`x`.
Examples:
.. code-block:: python
import paddle
import numpy as np
import paddle.fluid.dygraph as dg
with dg.guard():
a = np.array([[1.0 + 1.0j, 2.0 + 1.0j], [3.0+1.0j, 4.0+1.0j]])
x = dg.to_variable(a)
y = paddle.complex.transpose(x, [1, 0])
print(y.numpy())
# [[1.+1.j 3.+1.j]
# [2.+1.j 4.+1.j]]
"""
complex_variable_exists([x], "transpose")
real = layers.transpose(x.real, perm, name)
imag = layers.transpose(x.imag, perm, name)
return ComplexVariable(real, imag)
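Because the layer transposes the real and imaginary parts independently, the result matches np.transpose applied to the full complex array. A small sketch under the same dygraph assumptions as the docstring example:

import numpy as np
import paddle
import paddle.fluid.dygraph as dg

data = np.random.random((2, 3, 4)) + 1j * np.random.random((2, 3, 4))
perm = [2, 0, 1]

with dg.guard():
    out = paddle.complex.transpose(dg.to_variable(data), perm)
    # Per-component transpose is equivalent to transposing the complex array.
    assert np.allclose(out.numpy(), np.transpose(data, perm))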
@@ -1687,10 +1687,9 @@ class ComplexVariable(object):
holding the real part and imaginary part of complex numbers respectively.
**Notes**:
**The constructor of Variable should not be invoked directly.**
**The constructor of ComplexVariable should not be invoked directly.**
**Only support dygraph mode at present. Please use** :ref:`api_fluid_dygraph_to_variable` **
to create a dygraph ComplexVariable with complex number data.**
**Only support dygraph mode at present. Please use** :ref:`api_fluid_dygraph_to_variable` **to create a dygraph ComplexVariable with complex number data.**
Args:
real (Variable): The Variable holding real-part data.
......
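As the revised note says, a ComplexVariable is not constructed directly; it is produced by to_variable from complex NumPy data in dygraph mode. A hedged sketch of that path:

import numpy as np
import paddle.fluid.dygraph as dg

with dg.guard():
    cv = dg.to_variable(np.array([1.0 + 2.0j, 3.0 - 1.0j]))
    # The real and imaginary parts are held as two ordinary Variables.
    print(cv.real.numpy())  # [1. 3.]
    print(cv.imag.numpy())  # [ 2. -1.]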
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
class TestComplexMatMulLayer(unittest.TestCase):
def setUp(self):
self._places = [fluid.CPUPlace()]
if fluid.core.is_compiled_with_cuda():
self._places.append(fluid.CUDAPlace(0))
def compare(self, x, y):
for place in self._places:
with dg.guard(place):
x_var = dg.to_variable(x)
y_var = dg.to_variable(y)
result = paddle.complex.matmul(x_var, y_var)
np_result = np.matmul(x, y)
self.assertTrue(np.allclose(result.numpy(), np_result))
def test_complex_xy(self):
x = np.random.random(
(2, 3, 4, 5)).astype("float32") + 1J * np.random.random(
(2, 3, 4, 5)).astype("float32")
y = np.random.random(
(2, 3, 5, 4)).astype("float32") + 1J * np.random.random(
(2, 3, 5, 4)).astype("float32")
self.compare(x, y)
def test_complex_x(self):
x = np.random.random(
(2, 3, 4, 5)).astype("float32") + 1J * np.random.random(
(2, 3, 4, 5)).astype("float32")
y = np.random.random((2, 3, 5, 4)).astype("float32")
self.compare(x, y)
def test_complex_y(self):
x = np.random.random((2, 3, 4, 5)).astype("float32")
y = np.random.random(
(2, 3, 5, 4)).astype("float32") + 1J * np.random.random(
(2, 3, 5, 4)).astype("float32")
self.compare(x, y)
if __name__ == '__main__':
unittest.main()
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
class TestComplexTransposeLayer(unittest.TestCase):
def setUp(self):
self._places = [fluid.CPUPlace()]
if fluid.core.is_compiled_with_cuda():
self._places.append(fluid.CUDAPlace(0))
def test_identity(self):
data = np.random.random(
(2, 3, 4, 5)).astype("float32") + 1J * np.random.random(
(2, 3, 4, 5)).astype("float32")
perm = [3, 2, 0, 1]
np_trans = np.transpose(data, perm)
for place in self._places:
with dg.guard(place):
var = dg.to_variable(data)
trans = paddle.complex.transpose(var, perm=perm)
self.assertTrue(np.allclose(trans.numpy(), np_trans))
if __name__ == '__main__':
unittest.main()
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
......