#  Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import unittest
import numpy as np
from op_test import OpTest, skip_check_grad_ci
from paddle.fluid.op import Operator
from paddle.fluid import compiler, Program, program_guard


class DotOp(OpTest):
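    """Checks the static-graph `dot` operator on 1-D inputs: the forward
    result against np.dot plus gradients w.r.t. X and Y."""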

    def setUp(self):
        self.op_type = "dot"
        self.python_api = paddle.dot
        self.init_dtype()
        self.init_input_output()

        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(self.x),
            'Y': OpTest.np_dtype_to_fluid_dtype(self.y)
        }
        self.outputs = {'Out': self.out}
        self.attrs = {}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad_normal(self):
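        # For dot, the analytic gradients are d(Out)/dX = Y and d(Out)/dY = X;
        # on ROCm they are passed explicitly as user-defined grads.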
        if core.is_compiled_with_rocm():
            self.check_grad(
                ['X', 'Y'],
                'Out',
                user_defined_grads=[self.inputs['Y'], self.inputs['X']],
                check_eager=True)
        else:
            self.check_grad(['X', 'Y'], 'Out', check_eager=True)

    def test_check_grad_ignore_x(self):
        if core.is_compiled_with_rocm():
            self.check_grad(['Y'],
                            'Out',
                            no_grad_set=set("X"),
                            user_defined_grads=[self.inputs['X']],
                            check_eager=True)
        else:
            self.check_grad(['Y'],
                            'Out',
                            no_grad_set=set("X"),
                            check_eager=True)

    def test_check_grad_ignore_y(self):
        if core.is_compiled_with_rocm():
            self.check_grad(['X'],
                            'Out',
                            no_grad_set=set('Y'),
                            user_defined_grads=[self.inputs['Y']],
                            check_eager=True)
        else:
            self.check_grad(['X'],
                            'Out',
                            no_grad_set=set('Y'),
                            check_eager=True)

    def init_input_output(self):
        self.x = np.random.uniform(0.1, 1, [121]).astype(self.dtype)
        self.y = np.random.uniform(1, 3, [121]).astype(self.dtype)
        self.out = np.dot(self.x, self.y)

    def init_dtype(self):
        self.dtype = np.float64


class DotOpBatch(DotOp):
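    """Checks batched `dot` on 2-D inputs: each output row is the dot
    product of the corresponding rows of X and Y."""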

    def init_input_output(self):
        self.x = np.random.uniform(0.1, 1,
                                   [132]).astype(self.dtype).reshape([11, 12])
        self.y = np.random.uniform(1, 3,
                                   [132]).astype(self.dtype).reshape([11, 12])
        self.out = np.sum(self.x * self.y, axis=1).reshape([11, 1])

    def test_check_grad_normal(self):
        self.check_grad(['X', 'Y'], 'Out')

    def test_check_grad_ignore_x(self):
        self.check_grad(['Y'], 'Out', no_grad_set=set("X"))

    def test_check_grad_ignore_y(self):
        self.check_grad(['X'], 'Out', no_grad_set=set('Y'))


class TestDotOpError(unittest.TestCase):
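    """Checks that `paddle.dot` rejects unsupported dtypes and
    incompatible shapes in static-graph mode."""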

    def test_errors(self):
        with program_guard(Program(), Program()):

            # the input dtype of dot must be float16, float32, float64,
            # int32 or int64; float16 can only be used on a GPU place
            x1 = fluid.layers.data(name='x1', shape=[120], dtype="uint8")
            y1 = fluid.layers.data(name='y1', shape=[120], dtype="uint8")
            self.assertRaises(Exception, paddle.dot, x1, y1)

            x2 = fluid.layers.data(name='x2', shape=[2, 3], dtype="float32")
            y2 = fluid.layers.data(name='y2', shape=[2, 3], dtype="float32")
            self.assertRaises(Exception, paddle.dot, x2, y2)

            x3 = fluid.layers.data(name='x3', shape=[3], dtype="float32")
            y3 = fluid.layers.data(name='y3', shape=[2, 3], dtype="float32")
            self.assertRaises(Exception, paddle.dot, x3, y3)


class TestDygraph(unittest.TestCase):
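    """Checks `paddle.dot` results in dygraph mode for 1-D and 2-D inputs."""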

    def test_dygraph(self):
        with fluid.dygraph.guard():
            x1 = fluid.dygraph.to_variable(np.array([1, 3]).astype(np.float32))
            y1 = fluid.dygraph.to_variable(np.array([2, 5]).astype(np.float32))
            np.testing.assert_allclose(paddle.dot(x1, y1).numpy(),
                                       np.array([17]),
                                       rtol=1e-05)

            x1 = fluid.dygraph.to_variable(
                np.array([[1, 3], [3, 5]]).astype(np.float32))
            y1 = fluid.dygraph.to_variable(
                np.array([[2, 5], [6, 8]]).astype(np.float32))
            np.testing.assert_array_equal(
                paddle.dot(x1, y1).numpy(), np.array([[17], [58]]))


class TestComplexDotOp(OpTest):
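    """Checks `dot` on complex 1-D inputs, with analytic gradients
    supplied as user-defined grads."""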

    def setUp(self):
        self.op_type = "dot"
        self.python_api = paddle.dot
        self.init_base_dtype()
        self.init_input_output()
        self.init_grad_input_output()

        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(self.x),
            'Y': OpTest.np_dtype_to_fluid_dtype(self.y)
        }
        self.attrs = {'axis': -1, 'use_mkldnn': False}
        self.outputs = {'Out': self.out}

    def init_base_dtype(self):
        self.dtype = np.float64

    def init_input_output(self):
        self.x = np.random.random(100).astype(
            self.dtype) + 1J * np.random.random(100).astype(self.dtype)
        self.y = np.random.random(100).astype(
            self.dtype) + 1J * np.random.random(100).astype(self.dtype)
        self.out = np.dot(self.x, self.y)

    def init_grad_input_output(self):
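        # Complex gradients follow the conjugate rule:
        # d(Out)/dX = grad_out * conj(Y) and d(Out)/dY = grad_out * conj(X).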
        self.grad_out = np.ones(1, self.dtype) + 1J * np.ones(1, self.dtype)
        self.grad_x = self.grad_out * np.conj(self.y)
        self.grad_y = self.grad_out * np.conj(self.x)

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad_normal(self):
        self.check_grad(['X', 'Y'],
                        'Out',
                        user_defined_grads=[self.grad_x, self.grad_y],
                        user_defined_grad_outputs=[self.grad_out],
                        check_eager=True)

    def test_check_grad_ignore_x(self):
        self.check_grad(['Y'],
                        'Out',
                        no_grad_set=set("X"),
                        user_defined_grads=[self.grad_y],
                        user_defined_grad_outputs=[self.grad_out],
                        check_eager=True)

    def test_check_grad_ignore_y(self):
        self.check_grad(['X'],
                        'Out',
                        no_grad_set=set('Y'),
                        user_defined_grads=[self.grad_x],
                        user_defined_grad_outputs=[self.grad_out],
                        check_eager=True)


class TestComplexDotOp2D(OpTest):
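    """Checks `dot` on complex 2-D (batched) inputs, with analytic
    gradients supplied as user-defined grads."""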

    def setUp(self):
        self.op_type = "dot"
        self.init_base_dtype()
        self.init_input_output()
        self.init_grad_input_output()

        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(self.x),
            'Y': OpTest.np_dtype_to_fluid_dtype(self.y)
        }
        self.attrs = {'axis': -1, 'use_mkldnn': False}
        self.outputs = {'Out': self.out}

    def init_base_dtype(self):
        self.dtype = np.float64

    def init_input_output(self):
        self.x = np.random.random(
            (2, 100)).astype(self.dtype) + 1J * np.random.random(
                (2, 100)).astype(self.dtype)
        self.y = np.random.random(
            (2, 100)).astype(self.dtype) + 1J * np.random.random(
                (2, 100)).astype(self.dtype)
        self.out = np.diag(np.dot(self.x, self.y.T)).reshape(-1, 1)

    def init_grad_input_output(self):
        self.grad_out = np.ones((2, 1), self.dtype) + 1J * np.ones(
            (2, 1), self.dtype)
        self.grad_x = self._get_grad(self.grad_out, self.y)
        self.grad_y = self._get_grad(self.grad_out, self.x)

    def _get_grad(self, grad_out, input):
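        # Build the gradient row by row: grad[i] = grad_out[i] * conj(input[i]).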
        grad = np.empty((0, input.shape[1]))
        for i in range(grad_out.shape[0]):
            grad = np.append(grad, [grad_out[i] * np.conj(input[i])], axis=0)
        return grad

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X', 'Y'],
                        'Out',
                        user_defined_grads=[self.grad_x, self.grad_y],
                        user_defined_grad_outputs=[self.grad_out])

    def test_check_grad_ignore_x(self):
        self.check_grad(['Y'],
                        'Out',
                        no_grad_set=set("X"),
                        user_defined_grads=[self.grad_y],
                        user_defined_grad_outputs=[self.grad_out])

    def test_check_grad_ignore_y(self):
        self.check_grad(['X'],
                        'Out',
                        no_grad_set=set('Y'),
                        user_defined_grads=[self.grad_x],
                        user_defined_grad_outputs=[self.grad_out])


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()