test_declarative.py 16.0 KB
Newer Older
1 2 3 4 5 6 7 8 9 10 11 12 13 14
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import tempfile
import unittest

import numpy as np
from test_basic_api_transformation import dyfunc_to_variable

import paddle
from paddle import fluid
from paddle.fluid.dygraph import to_variable
from paddle.jit.api import to_static
from paddle.jit.dy2static.program_translator import (
    ConcreteProgram,
    StaticFunction,
)
from paddle.nn import Layer
from paddle.static import InputSpec


class SimpleNet(Layer):
    """A small Layer exposing @to_static methods with varied input_spec
    shapes (plain tensor, list, dict, nested list+dict) for the tests below."""

    def __init__(self):
        super().__init__()
        self.linear = paddle.nn.Linear(10, 3)

    @to_static(input_spec=[InputSpec(shape=[None, 10], dtype='float32')])
    def forward(self, x, a=1, b=2):
        # `a`/`b` are unused defaults; they exercise kwargs/default handling.
        y = self.inner_function(x)
        return y

    @to_static
    def inner_function(self, x):
        # Decorated without input_spec: converted from real inputs.
        y = self.linear(x)
        return y

    def add_func(self, x, y):
        # Intentionally undecorated; tests wrap it via `to_static(func)`.
        z = x + y
        return z

    @to_static(input_spec=[[InputSpec([None, 10]), InputSpec([None, 10])]])
    def func_with_list(self, l, int_val=1):
        # input_spec covers only the list argument `l`.
        x, y = l
        z = x + y
        z = z + int_val
        return z

    @to_static(
        input_spec=[{'x': InputSpec([None, 10]), 'y': InputSpec([None, 10])}]
    )
    def func_with_dict(self, d):
        x = d['x']
        y = d['y']
        z = x + y

        return z

    @to_static(
        input_spec=[
            [
                InputSpec([None]),
                {'x': InputSpec([None, 10]), 'y': InputSpec([None, 10])},
            ]
        ]
    )
    def func_with_list_dict(self, dl):
        # Nested spec: dl = [bias_tensor, {'x': ..., 'y': ...}].
        bias = dl[0]
        x = dl[1]['x']
        y = dl[1]['y']

        z = x + y
        z = z + bias

        return z


class TestStaticFunctionInstance(unittest.TestCase):
    """@to_static methods must be bound per-instance: each instance owns an
    independent StaticFunction and program cache."""

    def test_instance_same_class(self):
        with fluid.dygraph.guard(fluid.CPUPlace()):
            net_1 = SimpleNet()
            net_2 = SimpleNet()

            self.assertTrue(isinstance(net_1.forward, StaticFunction))
            self.assertTrue(isinstance(net_2.forward, StaticFunction))
            self.assertNotEqual(net_1.forward, net_2.forward)

            # convert layer into static program of net_1
            net_1.forward.concrete_program
            self.assertTrue(len(net_1.forward.program_cache) == 1)
            # check no conversion applied with net_2
            self.assertTrue(len(net_2.forward.program_cache) == 0)


class TestInputSpec(unittest.TestCase):
    """Covers input_spec handling: program caching, save/load, decorating
    arbitrary methods, list/dict/nested inputs, and error cases."""

    def setUp(self):
        self.temp_dir = tempfile.TemporaryDirectory()
        self.model_path = os.path.join(self.temp_dir.name, 'simple_net')

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_with_input_spec(self):
        with fluid.dygraph.guard(fluid.CPUPlace()):
            x = to_variable(np.ones([4, 10]).astype('float32'))
            y = to_variable(np.ones([4, 10]).astype('float32') * 2)
            int_val = 4.0

            net = SimpleNet()

            # 1. each method holds independent program cache
            out = net(x)
            self.assertTrue(len(net.forward.program_cache) == 1)

            # 2. test save load
            net.inner_function(x)
            paddle.jit.save(net, self.model_path)
            infer_net = paddle.jit.load(self.model_path)
            pred = infer_net(x)
            np.testing.assert_allclose(out.numpy(), pred.numpy(), rtol=1e-05)

            # 3. we can decorate any method
            x_2 = to_variable(np.ones([4, 20]).astype('float32'))
            # uses `to_static(func)` instead of `@to_static`
            net.add_func = to_static(net.add_func)
            out = net.add_func(x_2, np.ones([20]).astype('float32'))
            self.assertTrue(len(net.add_func.program_cache) == 1)

            # 5. test input with list
            out = net.func_with_list([x, y], int_val)

            # 6. test input with dict
            out = net.func_with_dict({'x': x, 'y': y})

            # 7. test input with list containing a dict
            int_np = np.ones([1]).astype('float32')
            out = net.func_with_list_dict([int_np, {'x': x, 'y': y}])

    def test_with_error(self):
        with fluid.dygraph.guard(fluid.CPUPlace()):
            x = to_variable(np.ones([4, 10]).astype('float32'))
            y = to_variable(np.ones([4, 10]).astype('float32') * 2)
            int_val = 4.0

            net = SimpleNet()

            # 1. kwargs and input_spec should not be specified at the same time
            with self.assertRaises(ValueError):
                net(x, a=1, other_kwarg=2)

            # 2. requires len(input_spec) <= len(args)
            with self.assertRaises(ValueError):
                net.add_func = to_static(
                    net.add_func,
                    input_spec=[
                        InputSpec([-1, 10]),
                        InputSpec([-1, 10]),
                        InputSpec([10]),
                    ],
                )
                net.add_func(x, y)

    def test_concrete_program(self):
        with fluid.dygraph.guard(fluid.CPUPlace()):
            x = to_variable(np.ones([4, 10]).astype('float32'))
            y = to_variable(np.ones([4, 10]).astype('float32') * 2)
            int_val = 4.0

            net = SimpleNet()
            # We can get concrete_program by specifying InputSpec information. Faking input is not needed.
            net.add_func = to_static(
                net.add_func,
                input_spec=[InputSpec([-1, 10]), InputSpec([-1, 10], name='y')],
            )
            cp1 = net.add_func.concrete_program
            self.assertTrue(cp1.inputs[-1].shape == (-1, 10))
            self.assertTrue(cp1.inputs[-1].name == 'y')

            # generate another program
            net.add_func = to_static(
                net.add_func,
                input_spec=[InputSpec([10]), InputSpec([10], name='label')],
            )
            cp2 = net.add_func.concrete_program
            self.assertTrue(cp2.inputs[-1].shape == (10,))
            self.assertTrue(cp2.inputs[-1].name == 'label')
            # Note(Aurelius84): New instance will be returned if we use `to_static(foo)` every time.
            # So number of cache program is 1.
            self.assertTrue(len(net.add_func.program_cache) == 1)
            self.assertTrue(cp1 != cp2)


def foo_func(a, b, c=1, d=2):
    """Return ``a + b``.

    ``c`` and ``d`` do not affect the result; they exist only so the
    to_static tests can exercise default-argument program caching.
    """
    return a + b


class TestDifferentInputSpecCacheProgram(unittest.TestCase):
    """Program-cache behavior: same-shape inputs hit the cache, changed
    default arguments or shapes create new concrete programs."""

    def setUp(self):
        paddle.jit.enable_to_static(True)

    def test_with_different_input(self):
        with fluid.dygraph.guard(fluid.CPUPlace()):
            x_data = np.ones([16, 10]).astype('float32')
            y_data = np.ones([10]).astype('float32') * 2
            z_data = np.ones([10]).astype('float32') * 2.2

            foo = to_static(foo_func)

            # [16, 10] + [10] (varbase)
            out_1 = foo(to_variable(x_data), to_variable(y_data))
            np.testing.assert_allclose(
                x_data + y_data, out_1.numpy(), rtol=1e-05
            )
            self.assertTrue(len(foo.program_cache) == 1)
            self.assertTrue(len(foo.program_cache.concrete_programs()) == 1)
            first_program = foo.program_cache.last()

            # [16, 10] + [10] (numpy)
            out_2 = foo(to_variable(x_data), y_data)
            np.testing.assert_allclose(
                x_data + y_data, out_2.numpy(), rtol=1e-05
            )
            self.assertTrue(len(foo.program_cache) == 1)

            # [16, 10] + [10] (numpy)
            out_3 = foo(to_variable(x_data), z_data)
            np.testing.assert_allclose(
                x_data + z_data, out_3.numpy(), rtol=1e-05
            )
            # hit cache program
            self.assertTrue(len(foo.program_cache) == 1)

            # [16, 10] + [10] (numpy) with other different arguments (c=3)
            out_4 = foo(to_variable(x_data), z_data, 3)
            np.testing.assert_allclose(
                x_data + z_data, out_4.numpy(), rtol=1e-05
            )
            # create a new program
            self.assertTrue(len(foo.program_cache) == 2)

            # test for recent program
            foo(to_variable(x_data), y_data)
            recent_program = foo.program_cache.last()
            self.assertTrue(first_program == recent_program)

    def test_get_concrete_program(self):
        foo = to_static(foo_func)

        # 1. specific InputSpec for `x`/`y`
        concrete_program_1 = foo.get_concrete_program(
            InputSpec([None, 10]), InputSpec([10])
        )
        self.assertTrue(len(foo.program_cache) == 1)

        # 2. specific `c`/`d` explicitly with same default value
        concrete_program_2 = foo.get_concrete_program(
            InputSpec([None, 10]), InputSpec([10]), 1, 2
        )
        self.assertTrue(concrete_program_2 == concrete_program_1)
        self.assertTrue(len(foo.program_cache) == 1)

        # 3. specific `c` = 2
        concrete_program_3 = foo.get_concrete_program(
            InputSpec([None, 10]), InputSpec([10]), c=2
        )
        self.assertTrue(concrete_program_3 != concrete_program_1)
        self.assertTrue(len(foo.program_cache) == 2)

        # 4. specific x.shape = [10]
        concrete_program_4 = foo.get_concrete_program(
            InputSpec([10]), InputSpec([10])
        )
        self.assertTrue(concrete_program_4 != concrete_program_1)
        self.assertTrue(len(foo.program_cache) == 3)

        # 5. only specific InputSpec of x
        with self.assertRaises(ValueError):
            concrete_program_5 = foo.get_concrete_program(InputSpec([10]))

        # 6. specific unknown kwargs `e`=4
        with self.assertRaises(TypeError):
            concrete_program_5 = foo.get_concrete_program(
                InputSpec([10]), InputSpec([10]), e=4
            )

    def test_concrete_program(self):
        with fluid.dygraph.guard(fluid.CPUPlace()):
            # usage 1
            foo_1 = paddle.jit.to_static(
                foo_func,
                input_spec=[
                    InputSpec([10], name='x'),
                    InputSpec([10], name='y'),
                ],
            )
            self.assertTrue(isinstance(foo_1.concrete_program, ConcreteProgram))

            # usage 2
            foo_2 = paddle.jit.to_static(foo_func)
            out = foo_2(paddle.rand([10]), paddle.rand([10]))
            self.assertTrue(isinstance(foo_2.concrete_program, ConcreteProgram))

            # raise error
            foo_3 = paddle.jit.to_static(foo_func)
            with self.assertRaises(ValueError):
                foo_3.concrete_program

class TestInputDefaultName(unittest.TestCase):
    """Default names generated for input_spec entries: plain args use the
    parameter name, list elements get `<name>_<idx>`, dict values use keys."""

    def setUp(self):
        paddle.disable_static()
        self.net = SimpleNet()

    def assert_default_name(self, func_name, input_names):
        # Helper: compare generated spec names of a decorated method.
        decorated_func = getattr(self.net, func_name)

        spec_names = [x.name for x in decorated_func.inputs]
        self.assertListEqual(spec_names, input_names)

    def test_common_input(self):
        self.assert_default_name('forward', ['x'])

    def test_list_input(self):
        self.assert_default_name('func_with_list', ['l_0', 'l_1'])

    def test_dict_input(self):
        self.assert_default_name('func_with_dict', ['x', 'y'])

    def test_nest_input(self):
        self.assert_default_name('func_with_list_dict', ['dl_0', 'x', 'y'])


class TestDeclarativeAPI(unittest.TestCase):
    """Error behavior of a to_static-converted function when run outside
    dynamic mode or with conversion disabled."""

    def test_error(self):
        func = to_static(dyfunc_to_variable)

        paddle.enable_static()

        # Failed to run the callable object decorated by '@paddle.jit.to_static'
        # if it is NOT in dynamic mode.
        with self.assertRaises(RuntimeError):
            func(np.ones(5).astype("int32"))

        paddle.jit.enable_to_static(False)
        with self.assertRaises(AssertionError):
            # AssertionError: We Only support to_variable in imperative mode,
            #  please use fluid.dygraph.guard() as context to run it in imperative Mode
            func(np.ones(5).astype("int32"))


class TestDecorateModelDirectly(unittest.TestCase):
    """Decorating a whole Layer instance via `to_static(net)` instead of
    decorating individual methods."""

    def setUp(self):
        paddle.disable_static()
        paddle.jit.enable_to_static(True)
        self.x = to_variable(np.ones([4, 10]).astype('float32'))

    def test_fake_input(self):
        net = SimpleNet()
        net = to_static(net)
        y = net(self.x)
        self.assertTrue(len(net.forward.program_cache) == 1)

    def test_input_spec(self):
        net = SimpleNet()
        net = to_static(net, input_spec=[InputSpec([None, 8, 10])])
        self.assertTrue(len(net.forward.inputs) == 1)
        self.assertTrue(len(net.forward.program_cache) == 1)
        input_shape = net.forward.inputs[0].shape
        self.assertListEqual(list(input_shape), [-1, 8, 10])

        # redecorate
        net = to_static(net, input_spec=[InputSpec([None, 16, 10])])
        input_shape = net.forward.inputs[0].shape
        self.assertListEqual(list(input_shape), [-1, 16, 10])


class TestErrorWithInitFromStaticMode(unittest.TestCase):
    """Accessing StaticFunction properties must raise RuntimeError when the
    layer was created in static-graph (non-imperative) mode."""

    def test_raise_error(self):
        # disable imperative
        paddle.enable_static()

        net = SimpleNet()
        with self.assertRaisesRegex(
            RuntimeError, "only available in dynamic mode"
        ):
            net.forward.concrete_program

        with self.assertRaisesRegex(
            RuntimeError, "only available in dynamic mode"
        ):
            net.forward.inputs

        with self.assertRaisesRegex(
            RuntimeError, "only available in dynamic mode"
        ):
            net.forward.outputs


class CallNonForwardFuncNet(paddle.nn.Layer):
    """Layer whose @to_static forward delegates to a sub-layer's plain
    (non-forward, undecorated) method."""

    def __init__(self):
        super().__init__()
        self.sub = CallNonForwardFuncSubNet()

    @paddle.jit.to_static
    def forward(self):
        return self.sub.func()


class CallNonForwardFuncSubNet(paddle.nn.Layer):
    """Sub-layer exposing an undecorated helper that gets called from a
    @to_static forward."""

    def __init__(self):
        super().__init__()
        self.a = paddle.to_tensor([1, 2])

    def func(self):
        # Plain method (not `forward`, not decorated) used in static graph.
        x = self.a * 2
        return x


class TestCallNonForwardFunc(unittest.TestCase):
    """A @to_static forward may invoke an undecorated method of a sub-layer."""

    def test_call_non_forward(self):
        paddle.disable_static()
        model = CallNonForwardFuncNet()
        result = model()
        self.assertEqual(result.numpy().tolist(), [2, 4])
        paddle.enable_static()


class SetBuffersNet1(paddle.nn.Layer):
    """Rebinds a tensor attribute inside a @to_static forward (allowed:
    the new value is derived from the old tensor)."""

    def __init__(self):
        super().__init__()
        self.a = paddle.to_tensor([1])

    @paddle.jit.to_static
    def forward(self):
        self.a = self.a + 1
        return self.a


class SetBuffersNet2(paddle.nn.Layer):
    """Reassigns a tensor attribute to None and then to a brand-new tensor
    inside a @to_static forward — the tests expect this to raise."""

    def __init__(self):
        super().__init__()
        self.b = paddle.to_tensor([2])

    @paddle.jit.to_static
    def forward(self):
        self.b = None
        self.b = paddle.to_tensor([3])
        return self.b


class TestSetBuffers(unittest.TestCase):
    """Setting layer buffers inside @to_static forward: in-place derived
    update works (and survives jit.save); replacing with a fresh tensor raises."""

    def setUp(self):
        self.temp_dir = tempfile.TemporaryDirectory()
        self.model_path = os.path.join(self.temp_dir.name, 'SetBuffersNet1')

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_set_buffers1(self):
        paddle.disable_static()
        net = SetBuffersNet1()
        out = net()
        self.assertEqual(out.numpy().tolist(), [2])
        paddle.jit.save(net, self.model_path)
        paddle.enable_static()

    def test_set_buffers2(self):
        paddle.disable_static()
        net = SetBuffersNet2()
        with self.assertRaises(RuntimeError):
            out = net()
        paddle.enable_static()


class ClassNoInheritLayer:
    """A plain class that deliberately does NOT inherit paddle.nn.Layer;
    used to check that `paddle.jit.to_static` rejects its methods."""

    def func(self, x):
        """Return ``x + 1`` (trivial body; decoration is what's under test)."""
        return x + 1


class TestClassNoInheritLayer(unittest.TestCase):
    """to_static on a method of a class that is not a Layer must raise TypeError."""

    def test_to_static(self):
        paddle.disable_static()
        instance = ClassNoInheritLayer()
        spec = [paddle.static.InputSpec(name='x', shape=[1])]
        with self.assertRaises(TypeError):
            paddle.jit.to_static(instance.func, input_spec=spec)


# Allow running this test module directly: `python test_declarative.py`.
if __name__ == '__main__':
    unittest.main()