#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import inspect
import unittest

import numpy as np

import paddle
import paddle.nn.functional as F
from paddle import fluid
from paddle.jit.dy2static.loop_transformer import NameVisitor
from paddle.utils import gast

SEED = 2020
np.random.seed(SEED)


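# The while condition depends on a Tensor (the caller passes a Tensor in),
# so dynamic-to-static should convert this loop into a `while_loop` op.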
def while_loop_dyfunc(x):
    i = fluid.dygraph.to_variable(x)
    while x < 10:
        i = i + x
        x = x + 1
    return i


def while_loop_dyfunc_without_tensor(x):
    a = 1
    # There are no tensors in the while condition, which means it is a plain
    # Python while loop, so it won't be transformed to a `while_loop` op.
    while not a > 4 and a > 0:
        x = x + 1
        a = a + 1

    return x


def while_loop_dyfunc_with_conflict_var(x):
    i = fluid.dygraph.to_variable(x)

    def relu(y):
        # 'y' is not visible outside the scope.
        return F.relu(y)

    while x < 10:
        # If a temporary variable is created with the same name
        # as an argument of the function, it should not be
        # included in the loop_vars.
        add_fn = lambda x, y: x + y
        i = add_fn(i, x)
        x = x + 1
    return i


def while_loop_dyfunc_with_none(x):
    i = (
        fluid.dygraph.to_variable(x)
        if x is not None
        else fluid.dygraph.to_variable(x + 1)
    )
    # Use `to_variable` so that static analysis can infer that the type of x is Tensor
    x = fluid.dygraph.to_variable(
        x
    )  # TODO(liym27): Delete it if the type of parameter x can be resolved
    flag = 1
    while x < 10:
        i = i + x if flag is not None else x + i
        x = x + 1
    return i


def for_loop_dyfunc(max_len):
    for i in range(max_len):
        ret = paddle.zeros(shape=[1], dtype='float32')
        paddle.increment(ret, value=2.0)
    return ret


def for_loop_dyfunc2(max_len):
    # Test case: a variable is both used and created in the loop, and is used before it is created
    x = paddle.tensor.fill_constant(shape=[1, 2], dtype="int32", value=1)

    for i in range(max_len):
        if i > 1:
            s = a
        a = 1
        q, _ = x.shape  # x.shape is only used, not created, in the loop

    ret = paddle.tensor.fill_constant(shape=[1], dtype="int32", value=s + q)
    return ret


def for_loop_dyfunc3(max_len):
    ret = paddle.zeros(shape=[1], dtype='float32')
    for i in range(1, 10, 2):
        paddle.increment(ret, value=2.0)
    return ret


def for_loop_dyfunc4(max_len):
    ret = paddle.zeros(shape=[1], dtype='float32')
    for i in range(10, 1, -2):
        paddle.increment(ret, value=2.0)
    return ret


def for_loop_dyfunc_not_support(max_len):
    ret = paddle.zeros(shape=[1], dtype='float32')
    a = -2
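    # The range step is a variable rather than a literal, so its sign cannot
    # be determined statically; this loop is expected to be unsupported.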
    for i in range(10, 1, a):
        paddle.increment(ret, value=2.0)
    return ret


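# `break` inside the loop exercises early-exit handling in the transformed loop.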
def for_break_single_return(max_len):
    x = 0
    for i in range(3):
        if i == 2:
            break
        x += 1
    return x


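# The while condition chains `and`/`or` over Tensor comparisons, so the
# transformer has to map Python bool ops onto logical ops over Tensors.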
def while_loop_bool_op(x):
    i = fluid.dygraph.to_variable(x)

    while x <= -1 or x < -3 or (x < -7 or x < -5) or (x >= 0 and x < 10):
        i = i + x
        x = x + 1
    return i


def while_loop_bool_op2(x):
    i = fluid.dygraph.to_variable(x)
    a = 1

    # The while condition mixes Paddle Variables and plain Python values.
    while x < 10 and (a < 4 or a > 0) or a < -1 or not x > -1:
        i = i + x
        x = x + 1
        a = a + 1
    return i


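# Instance attributes (foo.b, foo.c) are written in the loop body and read
# after it, so they should be tracked as loop variables.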
def while_loop_class_var(x):
    class Foo:
        def __init__(self):
            self.a = 3
            self.b = 4
            self.c = 5

    foo = Foo()
    i = fluid.dygraph.to_variable(x)
    while i < 10:
        foo.b = paddle.zeros(shape=[1], dtype='float32')
        foo.c = foo.b + foo.a
        i += 1
    return foo.c


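# The loop condition and body read the Tensor property `i.shape`, so the
# analyzed loop variables include a property access.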
def loop_var_contains_property(x):
    a = paddle.zeros(shape=[1], dtype='float32')
    i = paddle.to_tensor(x)
    s = i.shape
    while i < 10 and s[0] >= 1:
        a += i.shape[0]
        i += 1
    return a


def for_loop_class_var(max_len):
    class Foo:
        def __init__(self):
            self.a = 3
            self.b = 4
            self.c = 5

    foo = Foo()

    # Use `fill_constant` so that static analysis can infer that max_len is a Tensor
    max_len = paddle.tensor.fill_constant(
        shape=[1], value=max_len, dtype="int32"
    )

    for i in range(max_len):
        foo.b = paddle.zeros(shape=[1], dtype='float32')
        foo.c = foo.b + foo.a
    return foo.c


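# The returned value is created only inside the loop body, so the
# transformation has to expose it after the loop.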
def var_create_in_for_loop(max_len):
    for i in range(max_len):
        ret = paddle.zeros(shape=[3, 4, 5], dtype='float64')
    return ret


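# Nested and sequential loops: variable analysis runs per loop node, so each
# loop should report only its own loop variables (checked in
# TestNameVisitor.test_nested_loop_vars below).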
def nested_for_loop_dyfunc():
    two = paddle.tensor.fill_constant(shape=[1], value=2, dtype="int32")
    three = paddle.tensor.fill_constant(shape=[1], value=3, dtype="int32")
    for j in range(two):
        for i in range(10):
            a = 2 + j

    for i in range(three):
        b = paddle.zeros(shape=[1], dtype='float32')

    return b


def for_loop_dyfunc_with_listcomp(array):
    a = 1
    for j in range(array):
        res = [x + a for x in array]
        res = [i for i in array]
        x = 1
    b = [i for i in array]
    print(x)
    return res


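# Checks that NameVisitor.get_loop_var_names reports the expected loop
# variables and created variables for each While/For node in the functions
# above.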
class TestNameVisitor(unittest.TestCase):
    def setUp(self):
        self.loop_funcs = [
            while_loop_dyfunc,
            for_loop_dyfunc,
            while_loop_dyfunc_with_none,
            for_loop_dyfunc_with_listcomp,
        ]
        self.loop_var_names = [
            {"i", "x"},
            {"i", "ret", "max_len"},
            {"i", "x"},
            {"j", "array", "res", "x"},
        ]
        self.create_var_names = [set(), {"ret"}, set(), {"res", "x"}]

        self.nested_for_loop_func = nested_for_loop_dyfunc

    def test_loop_vars(self):
        for i in range(len(self.loop_funcs)):
            func = self.loop_funcs[i]
            test_func = inspect.getsource(func)
            gast_root = gast.parse(test_func)
            name_visitor = NameVisitor(gast_root)
            for node in gast.walk(gast_root):
                if isinstance(node, (gast.While, gast.For)):
                    (
                        loop_var_names,
                        create_var_names,
                    ) = name_visitor.get_loop_var_names(node)
                    self.assertEqual(loop_var_names, self.loop_var_names[i])
                    self.assertEqual(create_var_names, self.create_var_names[i])

    def test_nested_loop_vars(self):
        func = self.nested_for_loop_func
        test_func = inspect.getsource(func)
        gast_root = gast.parse(test_func)
        name_visitor = NameVisitor(gast_root)

        self.loop_var_names = [
            {"j", "two"},
            {"i", "three", "b"},
            {"i"},
        ]
        self.create_var_names = [set(), {"b"}, set()]

        i = 0
        for node in gast.walk(gast_root):
            if isinstance(node, (gast.While, gast.For)):
                (
                    loop_var_names,
                    create_var_names,
                ) = name_visitor.get_loop_var_names(node)
                self.assertEqual(
                    loop_var_names,
                    self.loop_var_names[i],
                    msg="loop_var_names : {}, \nexpected loop_var_names : {}".format(
                        loop_var_names, self.loop_var_names[i]
                    ),
                )
                self.assertEqual(
                    create_var_names,
                    self.create_var_names[i],
                    msg="i = {}\ncreate_var_names : {}, \nexpected create_var_names : {}".format(
                        i, create_var_names, self.create_var_names[i]
                    ),
                )
                i += 1


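# Each TestTransform* case below runs its dyfunc in both dygraph and static
# mode and asserts that the two results match.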
class TestTransformWhileLoop(unittest.TestCase):
    def setUp(self):
        self.place = (
            fluid.CUDAPlace(0)
            if fluid.is_compiled_with_cuda()
            else fluid.CPUPlace()
        )
        self.x = np.zeros(shape=(1,), dtype=np.int32)
        self._init_dyfunc()

    def _init_dyfunc(self):
        self.dyfunc = while_loop_dyfunc

    def _run_static(self):
        return self._run(to_static=True)

    def _run_dygraph(self):
        return self._run(to_static=False)

    def _run(self, to_static):
        with fluid.dygraph.guard(self.place):
            # Convert the input of dyfunc to a Tensor
            tensor_x = fluid.dygraph.to_variable(self.x, zero_copy=False)
            if to_static:
                ret = paddle.jit.to_static(self.dyfunc)(tensor_x)
            else:
                ret = self.dyfunc(tensor_x)
            if hasattr(ret, "numpy"):
                return ret.numpy()
            else:
                return ret

    def test_ast_to_func(self):
        static_numpy = self._run_static()
        dygraph_numpy = self._run_dygraph()
        print(static_numpy, dygraph_numpy)
        np.testing.assert_allclose(dygraph_numpy, static_numpy, rtol=1e-05)


class TestTransformWhileLoopWithoutTensor(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_dyfunc_without_tensor


class TestTransformWhileLoopWithConflictVar(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_dyfunc_with_conflict_var


class TestTransformWhileLoopWithNone(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_dyfunc_with_none


class TestForBreakSingleReturn(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_break_single_return


class TestWhileLoopBoolOp(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_bool_op


class TestWhileLoopBoolOp2(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_bool_op2


class TestWhileLoopClassVar(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_class_var


class TestLoopVarContainsProperty(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = loop_var_contains_property


class TestTransformForLoop(unittest.TestCase):
    def setUp(self):
        self.place = (
            fluid.CUDAPlace(0)
            if fluid.is_compiled_with_cuda()
            else fluid.CPUPlace()
        )
        self.len = 100
        self._init_dyfunc()

    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc

    def _run_static(self):
        return self._run(to_static=True)

    def _run_dygraph(self):
        return self._run(to_static=False)

    def _run(self, to_static):
        with fluid.dygraph.guard(self.place):
            if to_static:
                ret = paddle.jit.to_static(self.dyfunc)(self.len)
            else:
                ret = self.dyfunc(self.len)
            return ret.numpy()

    def test_ast_to_func(self):
        np.testing.assert_allclose(
            self._run_dygraph(), self._run_static(), rtol=1e-05
        )


class TestTransformForLoop2(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc2


class TestTransformForLoop3(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc3


class TestTransformForLoop4(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc4


class TestClassVarInForLoop(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_class_var


class TestVarCreateInForLoop(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = var_create_in_for_loop


class TestErrorInForLoop(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc_not_support


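# Iterating over a LayerDict inside `forward` drives a for loop over a dict
# container; exporting the model below checks that this is supported by
# dynamic-to-static.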
class Net(paddle.nn.Layer):
    def __init__(self):
        super().__init__()

        self.layer_dict = paddle.nn.LayerDict(
            {
                "conv1": paddle.nn.Conv2D(3, 3, 1),
                "conv2": paddle.nn.Conv2D(3, 3, 1),
                "conv3": paddle.nn.Conv2D(3, 3, 1),
            }
        )

    def forward(self, x):
        out = 0
        for layer_name in self.layer_dict:
            out += self.layer_dict[layer_name](x)
        return out


class TestForLoopMeetDict(unittest.TestCase):
    def test_start(self):
        net = Net()
        model = paddle.jit.to_static(
            net,
            input_spec=[
                paddle.static.InputSpec(
                    shape=[None, 3, 224, 224], dtype='float32'
                )
            ],
        )
        paddle.jit.save(model, "./inference/inference")


if __name__ == '__main__':
    unittest.main()