#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np
from op_test import OpTest

import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.base import switch_to_static_graph


def numpy_scatter_nd(ref, index, updates, fun):
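    """NumPy reference for scatter_nd-style ops, used to build expected outputs.

    Each row of the flattened `index` addresses a slice of `ref`; the matching
    slice of `updates` is folded into `ref` with `fun` (e.g. element-wise add).
    """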
    ref_shape = ref.shape
    index_shape = index.shape

    end_size = index_shape[-1]
    remain_numel = 1
    for i in range(len(index_shape) - 1):
        remain_numel *= index_shape[i]

    slice_size = 1
    for i in range(end_size, len(ref_shape)):
        slice_size *= ref_shape[i]

    flat_index = index.reshape([remain_numel] + list(index_shape[-1:]))
    flat_updates = updates.reshape((remain_numel, slice_size))
    flat_output = ref.reshape(list(ref_shape[:end_size]) + [slice_size])

    for i_up, i_out in enumerate(flat_index):
        i_out = tuple(i_out)
        flat_output[i_out] = fun(flat_output[i_out], flat_updates[i_up])
    return flat_output.reshape(ref.shape)


def numpy_scatter_nd_add(ref, index, updates):
    return numpy_scatter_nd(ref, index, updates, lambda x, y: x + y)


def judge_update_shape(ref, index):
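    # Expected Updates shape: index.shape[:-1] + ref.shape[index.shape[-1]:]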
    ref_shape = ref.shape
    index_shape = index.shape
    update_shape = []
    for i in range(len(index_shape) - 1):
        update_shape.append(index_shape[i])
    for i in range(index_shape[-1], len(ref_shape), 1):
        update_shape.append(ref_shape[i])
    return update_shape


class TestScatterNdAddSimpleOp(OpTest):
    """
    A simple example
    """

    def setUp(self):
        self.op_type = "scatter_nd_add"
        self.python_api = paddle.scatter_nd_add
        ref_np = np.random.random([100]).astype("float64")
        index_np = np.random.randint(0, 100, [100, 1]).astype("int32")
        updates_np = np.random.random([100]).astype("float64")
        expect_np = numpy_scatter_nd_add(ref_np.copy(), index_np, updates_np)

        self.inputs = {'X': ref_np, 'Index': index_np, 'Updates': updates_np}
        self.outputs = {'Out': expect_np}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(['X', 'Updates'], 'Out', check_eager=True)


class TestScatterNdAddWithEmptyIndex(OpTest):
    """
    Index has empty element
    """

    def setUp(self):
        self.op_type = "scatter_nd_add"
        self.python_api = paddle.scatter_nd_add
        ref_np = np.random.random((10, 10)).astype("float64")
        index_np = np.array([[], []]).astype("int32")
        updates_np = np.random.random((2, 10, 10)).astype("float64")

        expect_np = numpy_scatter_nd_add(ref_np.copy(), index_np, updates_np)

        self.inputs = {'X': ref_np, 'Index': index_np, 'Updates': updates_np}
        self.outputs = {'Out': expect_np}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(['X', 'Updates'], 'Out', check_eager=True)


class TestScatterNdAddWithHighRankSame(OpTest):
    """
    Both Index and X have high rank, and Rank(Index) = Rank(X)
    """

    def setUp(self):
        self.op_type = "scatter_nd_add"
        self.python_api = paddle.scatter_nd_add
        shape = (3, 2, 2, 1, 10)
        ref_np = np.random.rand(*shape).astype("float64")
        index_np = np.vstack(
            [np.random.randint(0, s, size=100) for s in shape]
        ).T.astype("int32")
        update_shape = judge_update_shape(ref_np, index_np)
        updates_np = np.random.rand(*update_shape).astype("float64")
        expect_np = numpy_scatter_nd_add(ref_np.copy(), index_np, updates_np)

        self.inputs = {'X': ref_np, 'Index': index_np, 'Updates': updates_np}
        self.outputs = {'Out': expect_np}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(['X', 'Updates'], 'Out', check_eager=True)


class TestScatterNdAddWithHighRankDiff(OpTest):
    """
    Both Index and X have high rank, and Rank(Index) < Rank(X)
    """

    def setUp(self):
        self.op_type = "scatter_nd_add"
        self.python_api = paddle.scatter_nd_add
        shape = (8, 2, 2, 1, 10)
        ref_np = np.random.rand(*shape).astype("double")
        index = np.vstack([np.random.randint(0, s, size=500) for s in shape]).T
        index_np = index.reshape([10, 5, 10, 5]).astype("int64")
        update_shape = judge_update_shape(ref_np, index_np)
        updates_np = np.random.rand(*update_shape).astype("double")
        expect_np = numpy_scatter_nd_add(ref_np.copy(), index_np, updates_np)

        self.inputs = {'X': ref_np, 'Index': index_np, 'Updates': updates_np}
        self.outputs = {'Out': expect_np}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(['X', 'Updates'], 'Out', check_eager=True)


# Test Python API
class TestScatterNdOpAPI(unittest.TestCase):
    """
    test scatter_nd_add api and scatter_nd api
    """

    def testcase1(self):
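        # index1's last dim (5) equals ref1's rank, so each index row addresses
        # a single element; updates1 therefore has shape index1.shape[:-1].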
        ref1 = fluid.layers.data(
            name='ref1',
            shape=[10, 9, 8, 1, 3],
            dtype='float32',
            append_batch_size=False,
        )
        index1 = fluid.layers.data(
            name='index1',
            shape=[5, 5, 8, 5],
            dtype='int32',
            append_batch_size=False,
        )
        updates1 = fluid.layers.data(
            name='update1',
            shape=[5, 5, 8],
            dtype='float32',
            append_batch_size=False,
        )
        output1 = paddle.scatter_nd_add(ref1, index1, updates1)

    def testcase2(self):
        ref2 = fluid.layers.data(
            name='ref2',
            shape=[10, 9, 8, 1, 3],
            dtype='double',
            append_batch_size=False,
        )
        index2 = fluid.layers.data(
            name='index2',
            shape=[5, 8, 5],
            dtype='int32',
            append_batch_size=False,
        )
        updates2 = fluid.layers.data(
            name='update2',
            shape=[5, 8],
            dtype='double',
            append_batch_size=False,
        )
        output2 = paddle.scatter_nd_add(
            ref2, index2, updates2, name="scatter_nd_add"
        )

    def testcase3(self):
        shape3 = [10, 9, 8, 1, 3]
        index3 = fluid.layers.data(
            name='index3',
            shape=[5, 5, 8, 5],
            dtype='int32',
            append_batch_size=False,
        )
        updates3 = fluid.layers.data(
            name='update3',
            shape=[5, 5, 8],
            dtype='float32',
            append_batch_size=False,
        )
        output3 = paddle.scatter_nd(index3, updates3, shape3)

    def testcase4(self):
        shape4 = [10, 9, 8, 1, 3]
        index4 = fluid.layers.data(
            name='index4',
            shape=[5, 5, 8, 5],
            dtype='int32',
            append_batch_size=False,
        )
        updates4 = fluid.layers.data(
            name='update4',
            shape=[5, 5, 8],
            dtype='double',
            append_batch_size=False,
        )
        output4 = paddle.scatter_nd(index4, updates4, shape4, name='scatter_nd')

    def testcase5(self):
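        # Cross-check GPU vs. CPU results in both dygraph and static graph
        # modes; skipped when Paddle is not compiled with CUDA.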
        if not fluid.core.is_compiled_with_cuda():
            return

        shape = [2, 3, 4]
        x = np.arange(int(np.prod(shape))).reshape(shape)
        index = np.array([[0, 0, 2], [0, 1, 2]])
        val = np.array([-1, -3])

        with fluid.dygraph.guard():
            device = paddle.get_device()
            paddle.set_device('gpu')
            gpu_value = paddle.scatter_nd_add(
                paddle.to_tensor(x),
                paddle.to_tensor(index),
                paddle.to_tensor(val),
            )
            paddle.set_device('cpu')
            cpu_value = paddle.scatter_nd_add(
                paddle.to_tensor(x),
                paddle.to_tensor(index),
                paddle.to_tensor(val),
            )
            np.testing.assert_array_equal(gpu_value.numpy(), cpu_value.numpy())
            paddle.set_device(device)

        @switch_to_static_graph
        def test_static_graph():
            with paddle.static.program_guard(
                paddle.static.Program(), paddle.static.Program()
            ):
                x_t = paddle.static.data(name="x", dtype=x.dtype, shape=x.shape)
                index_t = paddle.static.data(
                    name="index", dtype=index.dtype, shape=index.shape
                )
                val_t = paddle.static.data(
                    name="val", dtype=val.dtype, shape=val.shape
                )
                out_t = paddle.scatter_nd_add(x_t, index_t, val_t)
                feed = {x_t.name: x, index_t.name: index, val_t.name: val}
                fetch = [out_t]

                gpu_exe = paddle.static.Executor(paddle.CUDAPlace(0))
                gpu_value = gpu_exe.run(feed=feed, fetch_list=fetch)[0]
                cpu_exe = paddle.static.Executor(paddle.CPUPlace())
                cpu_value = cpu_exe.run(feed=feed, fetch_list=fetch)[0]
                np.testing.assert_array_equal(gpu_value, cpu_value)

        test_static_graph()


# Test Raise Error
class TestScatterNdOpRaise(unittest.TestCase):
    def test_check_raise(self):
        def check_raise_is_test():
            try:
                ref5 = fluid.layers.data(
                    name='ref5', shape=[3, 4, 5], dtype='float32'
                )
                index5 = fluid.layers.data(
                    name='index5', shape=[2, 10], dtype='int32'
                )
                updates5 = fluid.layers.data(
                    name='updates5', shape=[2, 10], dtype='float32'
                )
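                # index5's last dim (10) exceeds ref5's rank, which should
                # trigger the shape check in scatter_nd_add.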
                output5 = paddle.scatter_nd_add(ref5, index5, updates5)
            except Exception as e:
                t = "The last dimension of Input(Index)'s shape should be no greater "
                if t in str(e):
                    raise IndexError

        self.assertRaises(IndexError, check_raise_is_test)

    def test_check_raise2(self):
        with self.assertRaises(ValueError):
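            # ref6 is double while updates6 is float32; the dtype mismatch
            # should raise ValueError.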
            ref6 = fluid.layers.data(
                name='ref6',
                shape=[10, 9, 8, 1, 3],
                dtype='double',
                append_batch_size=False,
            )
            index6 = fluid.layers.data(
                name='index6',
                shape=[5, 8, 5],
                dtype='int32',
                append_batch_size=False,
            )
            updates6 = fluid.layers.data(
                name='update6',
                shape=[5, 8],
                dtype='float32',
                append_batch_size=False,
            )
            output6 = paddle.scatter_nd_add(ref6, index6, updates6)

    def test_check_raise3(self):
        def check_raise_is_test():
            try:
                shape = [3, 4, 5]
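                # updates7's shape is inconsistent with what index7 and `shape`
                # imply, so scatter_nd should report "Updates has wrong shape".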
                index7 = fluid.layers.data(
                    name='index7', shape=[2, 1], dtype='int32'
                )
                updates7 = fluid.layers.data(
                    name='updates7', shape=[2, 4, 5, 20], dtype='float32'
                )
                output7 = paddle.scatter_nd(index7, updates7, shape)
            except Exception as e:
                t = "Updates has wrong shape"
                if t in str(e):
                    raise ValueError

        self.assertRaises(ValueError, check_raise_is_test)


class TestDygraph(unittest.TestCase):
    def test_dygraph(self):
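        # scatter_nd scatters `updates` into a zero tensor of `shape` at the
        # rows given by `index` (duplicate indices accumulate).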
        with fluid.dygraph.guard(fluid.CPUPlace()):
            index_data = np.array([[1, 1], [0, 1], [1, 3]]).astype(np.int64)
            index = fluid.dygraph.to_variable(index_data)
            updates = paddle.rand(shape=[3, 9, 10], dtype='float32')
            shape = [3, 5, 9, 10]
            output = paddle.scatter_nd(index, updates, shape)

    def test_dygraph_1(self):
        with fluid.dygraph.guard(fluid.CPUPlace()):
            x = paddle.rand(shape=[3, 5, 9, 10], dtype='float32')
            updates = paddle.rand(shape=[3, 9, 10], dtype='float32')
            index_data = np.array([[1, 1], [0, 1], [1, 3]]).astype(np.int64)
            index = fluid.dygraph.to_variable(index_data)
            output = paddle.scatter_nd_add(x, index, updates)


if __name__ == "__main__":
    paddle.enable_static()
    unittest.main()