#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid as fluid
from paddle.fluid import compiler, Program, program_guard
import paddle
from paddle.fluid.framework import _test_eager_guard
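
# A rough dygraph sketch of what expand_v2 does (illustration only, not part
# of the tests below):
#     x = paddle.to_tensor([1.0, 2.0, 3.0])  # shape [3]
#     y = paddle.expand(x, shape=[2, 3])     # shape [2, 3]; both rows equal x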


# Situation 1: shape is a list (without tensor)
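# The reference output is computed with np.tile: expanding `ori_shape` to
# `shape` repeats the input `expand_times` times along each axis.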
class TestExpandV2OpRank1(OpTest):

    def setUp(self):
        self.op_type = "expand_v2"
        self.init_data()
        self.python_api = paddle.expand

        self.inputs = {'X': np.random.random(self.ori_shape).astype("float64")}
        self.attrs = {'shape': self.shape}
        output = np.tile(self.inputs['X'], self.expand_times)
        self.outputs = {'Out': output}

    def init_data(self):
        self.ori_shape = [100]
        self.shape = [100]
        self.expand_times = [1]

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(['X'], 'Out', check_eager=True)


class TestExpandV2OpRank2_DimExpanding(TestExpandV2OpRank1):

    def init_data(self):
        self.ori_shape = [120]
        self.shape = [2, 120]
        self.expand_times = [2, 1]


class TestExpandV2OpRank2(TestExpandV2OpRank1):

    def init_data(self):
        self.ori_shape = [1, 140]
        self.shape = [12, 140]
        self.expand_times = [12, 1]


class TestExpandV2OpRank3_Corner(TestExpandV2OpRank1):

    def init_data(self):
        self.ori_shape = (2, 10, 5)
        self.shape = (2, 10, 5)
        self.expand_times = (1, 1, 1)


class TestExpandV2OpRank4(TestExpandV2OpRank1):

    def init_data(self):
        self.ori_shape = (2, 4, 5, 7)
        self.shape = (-1, -1, -1, -1)
        self.expand_times = (1, 1, 1, 1)


# Situation 2: shape is a list (with tensor)
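# Here every entry of the target shape is fed as a separate 1-element int32
# tensor through 'expand_shapes_tensor'; -1 entries in the 'shape' attribute
# mark dimensions that must be inferred from those tensors at runtime.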
class TestExpandV2OpRank1_tensor_attr(OpTest):

    def setUp(self):
        self.op_type = "expand_v2"
        self.init_data()
        expand_shapes_tensor = []
        for index, ele in enumerate(self.expand_shape):
            expand_shapes_tensor.append(("x" + str(index), np.ones(
                (1)).astype('int32') * ele))

        self.inputs = {
            'X': np.random.random(self.ori_shape).astype("float64"),
            'expand_shapes_tensor': expand_shapes_tensor,
        }
        self.attrs = {"shape": self.infer_expand_shape}
        output = np.tile(self.inputs['X'], self.expand_times)
        self.outputs = {'Out': output}

    def init_data(self):
        self.ori_shape = [100]
        self.expand_times = [1]
        self.expand_shape = [100]
        self.infer_expand_shape = [-1]

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')


class TestExpandV2OpRank2_Corner_tensor_attr(TestExpandV2OpRank1_tensor_attr):

    def init_data(self):
        self.ori_shape = [12, 14]
        self.expand_times = [1, 1]
        self.expand_shape = [12, 14]
        self.infer_expand_shape = [12, -1]


# Situation 3: shape is a tensor
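# Here the whole target shape is passed as a single int32 'Shape' input
# tensor instead of the 'shape' attribute.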
class TestExpandV2OpRank1_tensor(OpTest):

    def setUp(self):
        self.op_type = "expand_v2"
        self.init_data()

        self.inputs = {
            'X': np.random.random(self.ori_shape).astype("float64"),
            'Shape': np.array(self.expand_shape).astype("int32"),
        }
        self.attrs = {}
        output = np.tile(self.inputs['X'], self.expand_times)
        self.outputs = {'Out': output}

    def init_data(self):
        self.ori_shape = [100]
        self.expand_times = [2, 1]
        self.expand_shape = [2, 100]

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')


# Situation 4: input x is Integer
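# Integer and bool inputs only verify the forward output; no gradient check
# is performed for these dtypes.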
class TestExpandV2OpInteger(OpTest):

    def setUp(self):
        self.op_type = "expand_v2"
        self.inputs = {
            'X': np.random.randint(10, size=(2, 4, 5)).astype("int32")
        }
        self.attrs = {'shape': [2, 4, 5]}
        output = np.tile(self.inputs['X'], (1, 1, 1))
        self.outputs = {'Out': output}

    def test_check_output(self):
        self.check_output()


# Situation 5: input x is Bool
class TestExpandV2OpBoolean(OpTest):

    def setUp(self):
        self.op_type = "expand_v2"
        self.inputs = {'X': np.random.randint(2, size=(2, 4, 5)).astype("bool")}
        self.attrs = {'shape': [2, 4, 5]}
        output = np.tile(self.inputs['X'], (1, 1, 1))
        self.outputs = {'Out': output}

    def test_check_output(self):
        self.check_output()


# Situation 6: input x is int64
class TestExpandV2OpInt64_t(OpTest):

    def setUp(self):
        self.op_type = "expand_v2"
        self.inputs = {
            'X': np.random.randint(10, size=(2, 4, 5)).astype("int64")
        }
        self.attrs = {'shape': [2, 4, 5]}
        output = np.tile(self.inputs['X'], (1, 1, 1))
        self.outputs = {'Out': output}

    def test_check_output(self):
        self.check_output()


class TestExpandV2Error(unittest.TestCase):
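    # paddle.tensor.expand is expected to reject LoDTensor inputs, unsupported
    # dtypes such as uint8, and bool inputs that require a gradient.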

    def test_errors(self):
        with program_guard(Program(), Program()):
            x1 = fluid.create_lod_tensor(np.array([[-1]]), [[1]],
                                         fluid.CPUPlace())
            shape = [2, 2]
            self.assertRaises(TypeError, paddle.tensor.expand, x1, shape)
            x2 = fluid.layers.data(name='x2', shape=[4], dtype="uint8")
            self.assertRaises(TypeError, paddle.tensor.expand, x2, shape)
            x3 = fluid.layers.data(name='x3', shape=[4], dtype="bool")
            x3.stop_gradient = False
            self.assertRaises(ValueError, paddle.tensor.expand, x3, shape)


# Test the Python API
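# The same expand is built three ways: with a literal shape, with a shape that
# contains a fill_constant element, and with a shape fed as a data tensor; all
# three results must equal np.tile(input, (1, 1)).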
class TestExpandV2API(unittest.TestCase):

    def test_api(self):
        input = np.random.random([12, 14]).astype("float32")
        x = fluid.layers.data(name='x',
                              shape=[12, 14],
                              append_batch_size=False,
                              dtype="float32")

        positive_2 = fluid.layers.fill_constant([1], "int32", 12)
        expand_shape = fluid.layers.data(name="expand_shape",
                                         shape=[2],
                                         append_batch_size=False,
                                         dtype="int32")

        out_1 = paddle.expand(x, shape=[12, 14])
        out_2 = paddle.expand(x, shape=[positive_2, 14])
        out_3 = paddle.expand(x, shape=expand_shape)

        g0 = fluid.backward.calc_gradient(out_2, x)

        exe = fluid.Executor(place=fluid.CPUPlace())
        res_1, res_2, res_3 = exe.run(
            fluid.default_main_program(),
            feed={
                "x": input,
                "expand_shape": np.array([12, 14]).astype("int32")
            },
            fetch_list=[out_1, out_2, out_3])
        assert np.array_equal(res_1, np.tile(input, (1, 1)))
        assert np.array_equal(res_2, np.tile(input, (1, 1)))
        assert np.array_equal(res_3, np.tile(input, (1, 1)))


class TestExpandInferShape(unittest.TestCase):
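    # When entries of the target shape are Variables, the static shape of the
    # output cannot be inferred and is reported as -1.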

    def test_shape_with_var(self):
        with program_guard(Program(), Program()):
            x = paddle.static.data(shape=[-1, 1, 3], name='x')
            fake_var = paddle.randn([2, 3])
            target_shape = [
                -1, paddle.shape(fake_var)[0],
                paddle.shape(fake_var)[1]
            ]
            out = paddle.expand(x, shape=target_shape)
            self.assertListEqual(list(out.shape), [-1, -1, -1])


# Test the Python dygraph API
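# Runs the same expand both under the eager guard and in legacy dygraph mode
# and checks that the results match.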
class TestExpandV2DygraphAPI(unittest.TestCase):

    def test_expand_times_is_tensor(self):
        with paddle.fluid.dygraph.guard():
            with _test_eager_guard():
                paddle.seed(1)
                a = paddle.rand([2, 5])
                egr_expand_1 = paddle.expand(a, shape=[2, 5])
                np_array = np.array([2, 5])
                egr_expand_2 = paddle.expand(a, shape=np_array)

            paddle.seed(1)
            a = paddle.rand([2, 5])
            expand_1 = paddle.expand(a, shape=[2, 5])
            np_array = np.array([2, 5])
            expand_2 = paddle.expand(a, shape=np_array)

            self.assertTrue(
                np.array_equal(egr_expand_1.numpy(), egr_expand_2.numpy()))
            self.assertTrue(np.array_equal(expand_1.numpy(), expand_2.numpy()))
            self.assertTrue(
                np.array_equal(expand_1.numpy(), egr_expand_1.numpy()))


if __name__ == "__main__":
    paddle.enable_static()
    unittest.main()