#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import gast
import inspect
import numpy as np
import paddle.fluid as fluid
import unittest

from paddle.fluid.dygraph.dygraph_to_static.loop_transformer import NameVisitor
from paddle.fluid.dygraph.jit import declarative

SEED = 2020
np.random.seed(SEED)


def while_loop_dyfunc(x):
    i = fluid.dygraph.to_variable(x)
    while x < 10:
        i = i + x
        x = x + 1
    return i


38 39 40 41 42 43 44 45 46 47 48
def while_loop_dyfunc_without_tensor(x):
    a = 1
    # There are no tensors in the while condition, which means it's a plain while in python,
    # so it wont't be transformed to `while_loop` op.
    while not a > 4 and a > 0:
        x = x + 1
        a = a + 1

    return x


49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65
def while_loop_dyfun_with_conflict_var(x):
    i = fluid.dygraph.to_variable(x)

    def relu(y):
        # 'y' is not visible outside the scope.
        return fluid.layers.relu(y)

    while x < 10:
        # If a tmp variable is created which has same name
        # with a argument in function, it should not be
        # included in the loop_vars.
        add_fn = lambda x, y: x + y
        i = add_fn(i, x)
        x = x + 1
    return i


66 67 68 69
def while_loop_dyfunc_with_none(x):
    i = fluid.dygraph.to_variable(x)\
        if x is not None \
        else fluid.dygraph.to_variable(x+1)
L
liym27 已提交
70 71 72
    # Use `to_variable` so that static analysis can analyze the type of X is Tensor
    x = fluid.dygraph.to_variable(
        x)  # TODO(liym27): Delete it if the type of parameter x can be resolved
73 74 75 76 77 78 79
    flag = 1
    while x < 10:
        i = i + x if flag is not None else x + i
        x = x + 1
    return i


80 81 82 83 84 85 86
def for_loop_dyfunc(max_len):
    for i in range(max_len):
        ret = fluid.layers.zeros(shape=[1], dtype='float32')
        fluid.layers.increment(ret, value=2.0, in_place=True)
    return ret


87 88 89 90 91 92 93 94 95 96
def for_loop_dyfunc2(max_len):
    # Test case: a variable is used and created in loop, but used before created
    for i in range(max_len):
        if i > 1:
            s = a
        a = 1
    ret = fluid.layers.fill_constant(shape=[1], dtype="int32", value=s)
    return ret


97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118
def for_loop_dyfunc3(max_len):
    ret = fluid.layers.zeros(shape=[1], dtype='float32')
    for i in range(1, 10, 2):
        fluid.layers.increment(ret, value=2.0, in_place=True)
    return ret


def for_loop_dyfunc4(max_len):
    ret = fluid.layers.zeros(shape=[1], dtype='float32')
    for i in range(10, 1, -2):
        fluid.layers.increment(ret, value=2.0, in_place=True)
    return ret


def for_loop_dyfunc_not_support(max_len):
    ret = fluid.layers.zeros(shape=[1], dtype='float32')
    a = -2
    for i in range(10, 1, a):
        fluid.layers.increment(ret, value=2.0, in_place=True)
    return ret


119 120
def while_loop_bool_op(x):
    i = fluid.dygraph.to_variable(x)
L
liym27 已提交
121

122
    while x <= -1 or x < -3 or (x < -7 or x < -5) or (x >= 0 and x < 10):
123 124 125 126 127
        i = i + x
        x = x + 1
    return i


128 129 130 131 132 133 134 135 136 137 138 139
def while_loop_bool_op2(x):
    i = fluid.dygraph.to_variable(x)
    a = 1

    # In the while condition, there are both Paddle Variable and non-Variable.
    while x < 10 and (a < 4 or a > 0) or a < -1 or not x > -1:
        i = i + x
        x = x + 1
        a = a + 1
    return i


140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163
def while_loop_class_var(x):
    class Foo(object):
        def __init__(self):
            self.a = 3
            self.b = 4
            self.c = 5

    foo = Foo()
    i = fluid.dygraph.to_variable(x)
    while i < 10:
        foo.b = fluid.layers.zeros(shape=[1], dtype='float32')
        foo.c = foo.b + foo.a
        i += 1
    return foo.c


def for_loop_class_var(max_len):
    class Foo(object):
        def __init__(self):
            self.a = 3
            self.b = 4
            self.c = 5

    foo = Foo()
L
liym27 已提交
164 165 166 167

    # Use `to_variable` so that static analysis can analyze the type of X is Tensor
    max_len = fluid.layers.fill_constant(
        shape=[1], value=max_len, dtype="int32")
168

169 170 171 172 173 174
    for i in range(max_len):
        foo.b = fluid.layers.zeros(shape=[1], dtype='float32')
        foo.c = foo.b + foo.a
    return foo.c


175 176 177 178 179 180
def var_create_in_for_loop(max_len):
    for i in range(max_len):
        ret = fluid.layers.zeros(shape=[3, 4, 5], dtype='float64')
    return ret


181 182 183 184 185
def nested_for_loop_dyfunc():
    two = fluid.layers.fill_constant(shape=[1], value=2, dtype="int32")
    three = fluid.layers.fill_constant(shape=[1], value=3, dtype="int32")
    for j in range(two):
        for i in range(10):
186
            a = 2 + j
187 188 189 190 191 192 193

    for i in range(three):
        b = fluid.layers.zeros(shape=[1], dtype='float32')

    return b


194 195 196 197 198 199 200 201 202 203 204
def for_loop_dufunc_with_listcomp(array):
    a = 1
    for j in range(array):
        res = [x + a for x in array]
        res = [i for i in array]
        x = 1
    b = [i for i in array]
    print(x)
    return res


205
class TestNameVisitor(unittest.TestCase):
206
    def setUp(self):
207
        self.loop_funcs = [
208 209
            while_loop_dyfunc, for_loop_dyfunc, while_loop_dyfunc_with_none,
            for_loop_dufunc_with_listcomp
210 211
        ]
        self.loop_var_names = [
212 213
            set(["i", "x"]), set(["i", "ret", "max_len"]), set(["i", "x"]),
            set(["j", "array", "res", "x"])
214
        ]
215
        self.create_var_names = [set(), set(["ret"]), set(), set(["res", "x"])]
216

217 218
        self.nested_for_loop_func = nested_for_loop_dyfunc

219
    def test_loop_vars(self):
220 221 222 223 224 225 226 227 228 229 230 231
        for i in range(len(self.loop_funcs)):
            func = self.loop_funcs[i]
            test_func = inspect.getsource(func)
            gast_root = gast.parse(test_func)
            name_visitor = NameVisitor(gast_root)
            for node in gast.walk(gast_root):
                if isinstance(node, (gast.While, gast.For)):
                    loop_var_names, create_var_names = name_visitor.get_loop_var_names(
                        node)
                    self.assertEqual(loop_var_names, self.loop_var_names[i])
                    self.assertEqual(create_var_names, self.create_var_names[i])

232 233 234 235 236 237 238 239 240
    def test_nested_loop_vars(self):
        func = self.nested_for_loop_func
        test_func = inspect.getsource(func)
        gast_root = gast.parse(test_func)
        name_visitor = NameVisitor(gast_root)

        self.loop_var_names = [
            set(["j", "two"]),
            set(["i", "three", "b"]),
241
            set(["i", "j"]),
242 243
        ]
        self.create_var_names = [set(), set(["b"]), set()]
244

245 246 247 248 249
        i = 0
        for node in gast.walk(gast_root):
            if isinstance(node, (gast.While, gast.For)):
                loop_var_names, create_var_names = name_visitor.get_loop_var_names(
                    node)
250 251 252 253 254 255 256 257 258 259
                self.assertEqual(
                    loop_var_names,
                    self.loop_var_names[i],
                    msg="loop_var_names : {}, \nexpected loop_var_names : {}".
                    format(loop_var_names, self.loop_var_names[i]))
                self.assertEqual(
                    create_var_names,
                    self.create_var_names[i],
                    msg="i = {}\ncreate_var_names : {}, \nexpected create_var_names : {}".
                    format(i, create_var_names, self.create_var_names[i]))
260 261
                i += 1

262 263

class TestTransformWhileLoop(unittest.TestCase):
264 265 266 267
    def setUp(self):
        self.place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        self.x = np.zeros(shape=(1), dtype=np.int32)
268 269 270 271
        self._init_dyfunc()

    def _init_dyfunc(self):
        self.dyfunc = while_loop_dyfunc
272 273

    def _run_static(self):
274
        return self._run(to_static=True)
275 276

    def _run_dygraph(self):
277 278 279
        return self._run(to_static=False)

    def _run(self, to_static):
280
        with fluid.dygraph.guard(self.place):
281 282
            # Set the input of dyfunc to VarBase
            tensor_x = fluid.dygraph.to_variable(self.x, zero_copy=False)
283
            if to_static:
284
                ret = declarative(self.dyfunc)(tensor_x)
285
            else:
286
                ret = self.dyfunc(tensor_x)
287 288 289 290
            return ret.numpy()

    def test_ast_to_func(self):
        static_numpy = self._run_static()
291 292
        dygraph_numpy = self._run_dygraph()
        self.assertTrue(np.allclose(dygraph_numpy, static_numpy))
293 294


295 296 297 298 299
class TestTransformWhileLoopWithoutTensor(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_dyfunc_without_tensor


300 301 302 303 304
class TestTransformWhileLoopWithConflicVar(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_dyfun_with_conflict_var


305 306 307 308 309
class TestTransformWhileLoopWithNone(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_dyfunc_with_none


310 311 312 313 314
class TestWhileLoopBoolOp(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_bool_op


315 316 317 318 319
class TestWhileLoopBoolOp2(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_bool_op2


320 321 322 323 324
class TestWhileLoopClassVar(TestTransformWhileLoop):
    def _init_dyfunc(self):
        self.dyfunc = while_loop_class_var


325 326 327 328 329
class TestTransformForLoop(unittest.TestCase):
    def setUp(self):
        self.place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        self.len = 100
330 331 332 333
        self._init_dyfunc()

    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc
334 335

    def _run_static(self):
336
        return self._run(to_static=True)
337 338

    def _run_dygraph(self):
339 340 341
        return self._run(to_static=False)

    def _run(self, to_static):
342
        with fluid.dygraph.guard(self.place):
343 344 345 346
            if to_static:
                ret = declarative(self.dyfunc)(self.len)
            else:
                ret = self.dyfunc(self.len)
347 348 349 350 351 352
            return ret.numpy()

    def test_ast_to_func(self):
        self.assertTrue(np.allclose(self._run_dygraph(), self._run_static()))


353 354 355 356 357
class TestTransformForLoop2(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc2


358 359 360 361 362 363 364 365 366 367
class TestTransformForLoop3(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc3


class TestTransformForLoop4(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc4


368 369 370 371 372
class TestClassVarInForLoop(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_class_var


373 374 375 376 377
class TestVarCreateInForLoop(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = var_create_in_for_loop


378 379 380 381 382 383 384 385 386 387 388 389
class TestErrorInForLoop(TestTransformForLoop):
    def _init_dyfunc(self):
        self.dyfunc = for_loop_dyfunc_not_support

    def test_ast_to_func(self):
        with self.assertRaisesRegexp(
                NotImplementedError,
                "Dynamic-to-Static only supports the step value is a constant or negative constant "
        ):
            self._run_static()


390 391
if __name__ == '__main__':
    unittest.main()