test_base_layer.py 21.8 KB
Newer Older
X
polish  
Xin Pan 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
16

X
polish  
Xin Pan 已提交
17 18
import numpy as np

19
import paddle
X
polish  
Xin Pan 已提交
20
import paddle.fluid as fluid
21
from paddle.fluid.dygraph import to_variable
22 23 24 25 26 27
from paddle.fluid.framework import (
    EagerParamBase,
    ParamBase,
    _test_eager_guard,
    in_dygraph_mode,
)
28
from paddle.jit import ProgramTranslator
X
polish  
Xin Pan 已提交
29 30


31
class L1(fluid.Layer):
    """Leaf layer holding two constant-initialized 2x2 float32 parameters.

    forward() takes no input and returns the element-wise sum of the two
    weights, so with the 0.1 constant initializer the result is 0.2 * ones.
    """

    def __init__(self):
        super().__init__()
        # Both weights share the same attr: constant init with value 0.1.
        self._param_attr = fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.1)
        )
        self.w1 = self.create_parameter(
            attr=self._param_attr, shape=[2, 2], dtype='float32', is_bias=False
        )
        self.w2 = self.create_parameter(
            attr=self._param_attr, shape=[2, 2], dtype='float32', is_bias=False
        )

    def forward(self):
        # No input: simply add the two owned parameters.
        return self.w1 + self.w2


48
class L2(fluid.Layer):
    """Composite layer: two L1 sublayers whose outputs are summed."""

    def __init__(self):
        super().__init__()
        self.layer1 = L1()
        self.layer2 = L1()

    def forward(self):
        # Sum of both sublayer outputs (each 0.2 * ones -> 0.4 * ones).
        return self.layer1() + self.layer2()


58
class L3(fluid.Layer):
    """Three-level nesting: two L2 sublayers whose outputs are summed."""

    def __init__(self):
        super().__init__()
        self.layer1 = L2()
        self.layer2 = L2()

    def forward(self):
        # Sum of both L2 outputs (each 0.4 * ones -> 0.8 * ones).
        return self.layer1() + self.layer2()


class TestBaseLayer(unittest.TestCase):
    """Checks parameter naming/traversal and forward results of nested Layers."""

    def func_test_one_level(self):
        with fluid.dygraph.guard():
            net = L1()
            ret = net()
            # named_parameters with a prefix yields '<prefix>.<param_name>'.
            expected_names = ['l1.w1', 'l1.w2']
            for idx, (name, _) in enumerate(
                net.named_parameters(prefix='l1')
            ):
                self.assertEqual(name, expected_names[idx])
            # Two 0.1-initialized weights summed -> 0.2 everywhere.
            np.testing.assert_allclose(
                ret.numpy(), 0.2 * np.ones([2, 2]), rtol=1e-05
            )

    def test_one_level(self):
        with _test_eager_guard():
            self.func_test_one_level()
        self.func_test_one_level()

    def func_test_three_level(self):
        with fluid.dygraph.guard():
            net = L3()
            # Full dotted paths through L3 -> L2 -> L1 -> parameter.
            expected_names = [
                'l3.layer1.layer1.w1',
                'l3.layer1.layer1.w2',
                'l3.layer1.layer2.w1',
                'l3.layer1.layer2.w2',
                'l3.layer2.layer1.w1',
                'l3.layer2.layer1.w2',
                'l3.layer2.layer2.w1',
                'l3.layer2.layer2.w2',
            ]
            for idx, (name, _) in enumerate(
                net.named_parameters(prefix='l3')
            ):
                self.assertEqual(name, expected_names[idx])
            ret = net()
            # Eight 0.1-initialized weights summed -> 0.8 everywhere.
            np.testing.assert_allclose(
                ret.numpy(), 0.8 * np.ones([2, 2]), rtol=1e-05
            )

    def test_three_level(self):
        with _test_eager_guard():
            self.func_test_three_level()
        self.func_test_three_level()

    def func_test_add_parameter_with_error(self):
        with fluid.dygraph.guard():
            layer = fluid.Layer()
            param = layer.create_parameter(shape=[1])

            # Name must be a string.
            with self.assertRaises(TypeError):
                layer.add_parameter(10, param)

            # Name may not contain '.' or be empty.
            with self.assertRaises(KeyError):
                layer.add_parameter("param.name", param)

            with self.assertRaises(KeyError):
                layer.add_parameter("", param)

            # Name may not collide with an existing attribute.
            with self.assertRaises(KeyError):
                layer.test_param = 10
                layer.add_parameter("test_param", param)

            # Value must be a parameter instance.
            with self.assertRaises(TypeError):
                layer.add_parameter("no_param", 10)

            # A parameter present in the load-dict holder is accepted.
            load_param = layer.create_parameter(shape=[1])
            layer._loaddict_holder[load_param.name] = load_param
            layer.add_parameter("load_param", load_param)

    def test_add_parameter_with_error(self):
        with _test_eager_guard():
            self.func_test_add_parameter_with_error()
        self.func_test_add_parameter_with_error()

X
polish  
Xin Pan 已提交
144

145 146
class BufferLayer(fluid.Layer):
    """Layer with a single registered int32 buffer named 'layer_buffer'."""

    def __init__(self):
        super().__init__()
        zeros_buf = to_variable(np.zeros([2, 4]).astype('int32'))
        self.register_buffer("layer_buffer", zeros_buf)

    def forward(self):
        pass


class BufferNet(fluid.Layer):
    """Net mixing a buffer-holding sublayer, a parameter, and two own buffers."""

    def __init__(self):
        super().__init__()
        self.buffer_layer = BufferLayer()
        self.w1 = self.create_parameter(
            shape=[2, 2], dtype='float32', is_bias=False
        )
        ones_buf = to_variable(np.ones([2, 4]).astype('int32'))
        self.register_buffer("net_buffer", ones_buf)

        # Plain attribute assignment of a variable; TestBuffer counts this
        # among the net's buffers as well.
        self.new_buffer = to_variable(np.ones([4, 2]).astype('int32'))

    def forward(self):
        pass


class TestBuffer(unittest.TestCase):
    """Covers Layer buffer registration, naming, persistability and state_dict.

    Fix: ``assertRaisesRegexp`` is a deprecated alias removed in Python 3.12;
    all call sites now use ``assertRaisesRegex``.
    """

    def func_test_buffers_and_named_buffers(self):
        def names(named_buffers):
            return [name for name, _ in named_buffers]

        with fluid.dygraph.guard():
            layer = BufferLayer()
            net = BufferNet()

            self.assertEqual(len(layer.buffers()), 1)
            self.assertEqual(names(layer.named_buffers()), ['layer_buffer'])

            # Two own buffers plus the sublayer's, with dotted sub-path.
            self.assertEqual(len(net.buffers()), 3)
            self.assertEqual(
                names(net.named_buffers()),
                ['net_buffer', 'new_buffer', 'buffer_layer.layer_buffer'],
            )

            # Excluding sublayers drops the nested buffer.
            self.assertEqual(len(net.buffers(include_sublayers=False)), 2)
            self.assertEqual(
                names(net.named_buffers(include_sublayers=False)),
                ['net_buffer', 'new_buffer'],
            )

    def test_buffers_and_named_buffers(self):
        with _test_eager_guard():
            self.func_test_buffers_and_named_buffers()
        self.func_test_buffers_and_named_buffers()

    def func_test_register_buffer_with_error(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var = to_variable(np.zeros([1]))

            # Buffer name must be a string.
            with self.assertRaisesRegex(
                TypeError, "name of buffer should be a string"
            ):
                net.register_buffer(12, var)

            # A parameter is not accepted as a buffer value.
            with self.assertRaisesRegex(
                TypeError, "buffer should be a Paddle.Tensor"
            ):
                if in_dygraph_mode():
                    net.register_buffer(
                        "buffer_name", EagerParamBase([2, 2], 'float32')
                    )
                else:
                    net.register_buffer(
                        "buffer_name", ParamBase([2, 2], 'float32')
                    )

            # Name may not contain '.' and may not be empty.
            with self.assertRaisesRegex(
                KeyError, "name of buffer can not contain"
            ):
                net.register_buffer("buffer.name", var)

            with self.assertRaisesRegex(
                KeyError, "name of buffer can not be empty"
            ):
                net.register_buffer("", var)

            # Name may not collide with an existing plain attribute...
            net.attr_name = 10
            with self.assertRaisesRegex(KeyError, "already exists"):
                net.register_buffer("attr_name", var)

            del net.attr_name
            # ...nor with an existing parameter attribute.
            if in_dygraph_mode():
                net.attr_name = EagerParamBase([2, 2], 'float32')
            else:
                net.attr_name = ParamBase([2, 2], 'float32')
            with self.assertRaisesRegex(KeyError, "already exists"):
                net.register_buffer("attr_name", var)

    def test_register_buffer_with_error(self):
        with _test_eager_guard():
            self.func_test_register_buffer_with_error()
        self.func_test_register_buffer_with_error()

    def func_test_register_buffer_same_name(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            var2 = to_variable(np.zeros([2]))
            var3 = to_variable(np.zeros([3]))

            # Re-registering under the same name replaces the buffer.
            net.register_buffer("buffer_name", var1)
            self.assert_var_base_equal(net.buffer_name, var1)
            net.register_buffer("buffer_name", var2)
            self.assert_var_base_equal(net.buffer_name, var2)
            net.register_buffer("buffer_name", var3)
            self.assert_var_base_equal(net.buffer_name, var3)

    def test_register_buffer_same_name(self):
        with _test_eager_guard():
            self.func_test_register_buffer_same_name()
        self.func_test_register_buffer_same_name()

    def func_test_buffer_not_persistable(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))

            # Non-persistable buffers are enumerated but kept out of state_dict.
            net.register_buffer("buffer_name", var1, persistable=False)
            self.assertEqual(len(net.buffers()), 1)
            self.assertEqual(len(net.state_dict()), 0)

    def test_buffer_not_persistable(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable()
        self.func_test_buffer_not_persistable()

    def func_test_buffer_not_persistable_del(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            net.register_buffer("buffer_name", var1, persistable=False)
            # Deleting the attribute also removes it from the buffer list.
            del net.buffer_name
            self.assertEqual(len(net.buffers()), 0)

    def test_buffer_not_persistable_del(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable_del()
        self.func_test_buffer_not_persistable_del()

    def func_test_buffer_not_persistable_overwrite(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            var2 = to_variable(np.zeros([2]))
            net.register_buffer("buffer_name", var1, persistable=False)
            net.register_buffer("buffer_name", var2)

            # Allow to overwrite a non-persistable buffer with a persistable var.
            self.assertEqual(len(net.buffers()), 1)
            self.assertEqual(len(net.state_dict()), 1)

            # And the reverse: persistable -> non-persistable drops it from
            # state_dict again.
            net.register_buffer("buffer_name", var1, persistable=False)
            self.assertEqual(len(net.buffers()), 1)
            self.assertEqual(len(net.state_dict()), 0)

    def test_buffer_not_persistable_overwrite(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable_overwrite()
        self.func_test_buffer_not_persistable_overwrite()

    def func_test_buffer_not_persistable_assign(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            net.register_buffer("buffer_name", var1, persistable=False)

            # Assigning Nones will remove the buffer, but allow to re-assign
            # to remark it as buffer.
            net.buffer_name = None
            self.assertEqual(len(net.buffers()), 0)
            self.assertEqual(len(net.state_dict()), 0)

            net.buffer_name = var1
            self.assertEqual(len(net.buffers()), 1)
            self.assertEqual(len(net.state_dict()), 0)

            # Re-assign a ParamBase will remove the buffer.
            if in_dygraph_mode():
                net.buffer_name = EagerParamBase([2, 2], 'float32')
            else:
                net.buffer_name = ParamBase([2, 2], 'float32')
            self.assertEqual(len(net.buffers()), 0)
            self.assertEqual(len(net.state_dict()), 1)

    def test_buffer_not_persistable_assign(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable_assign()
        self.func_test_buffer_not_persistable_assign()

    def func_test_buffer_not_persistable_load(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            net.register_buffer("buffer_name", var1, persistable=False)
            # Loading an empty dict must not complain about the missing
            # non-persistable buffer.
            net.load_dict({})

    def test_buffer_not_persistable_load(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable_load()
        self.func_test_buffer_not_persistable_load()

    def func_test_buffer_state_dict(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([2, 3]))
            var2 = to_variable(np.zeros([3, 2]))
            net.register_buffer("buffer_var1", var1)
            net.register_buffer("buffer_var2", var2, persistable=False)

            # Only the persistable buffer appears in state_dict.
            self.assertEqual(len(net.state_dict()), 1)
            self.assertEqual(
                [name for name, _ in net.state_dict().items()], ["buffer_var1"]
            )

            # load state_dict
            net_load = fluid.Layer()
            var = to_variable(np.ones([2, 3]))
            net_load.register_buffer("buffer_var1", var)
            net_load.load_dict(net.state_dict())

            self.assert_var_base_equal(net_load.buffer_var1, var1)

    def test_buffer_state_dict(self):
        with _test_eager_guard():
            self.func_test_buffer_state_dict()
        self.func_test_buffer_state_dict()

    def assert_var_base_equal(self, var1, var2):
        """Assert two variables hold element-wise identical ndarray values."""
        np.testing.assert_array_equal(var1.numpy(), var2.numpy())
385 386


387 388
class BufferNetWithModification(paddle.nn.Layer):
    """Layer whose to_static forward mutates one buffer and rebinds another.

    Used to verify that buffer updates survive dynamic-to-static translation.
    """

    def __init__(self, shape):
        super().__init__()

        self.buffer1 = paddle.zeros(shape, 'int32')
        self.buffer2 = paddle.zeros(shape, 'int32')

    @paddle.jit.to_static
    def forward(self, x):
        # In-place update of buffer1, then rebinding of buffer2.
        self.buffer1 += x
        self.buffer2 = self.buffer1 + x

        out = self.buffer1 + self.buffer2

        return out


class TestModifiedBuffer(unittest.TestCase):
    """Dygraph vs. static-graph results must agree when buffers are mutated."""

    def funcsetUp(self):
        # Called explicitly (not unittest's setUp) so it runs inside and
        # outside the eager guard.
        paddle.disable_static()
        self.prog_trans = ProgramTranslator()
        self.shape = [10, 16]

    def _run(self, to_static=False):
        self.prog_trans.enable(to_static)

        x = paddle.ones([1], 'int32')
        net = BufferNetWithModification(self.shape)
        out = net(x)

        return out, net.buffer1, net.buffer2

    def func_test_modified(self):
        self.funcsetUp()
        dy_outs = self._run(False)
        st_outs = self._run(True)

        # Output and both buffers must match element-wise across modes.
        for dy_t, st_t in zip(dy_outs, st_outs):
            np.testing.assert_array_equal(dy_t.numpy(), st_t.numpy())

    def test_modified(self):
        with _test_eager_guard():
            self.func_test_modified()
        self.func_test_modified()

434

C
chentianyu03 已提交
435
class TestLayerTo(unittest.TestCase):
    """Exercises Layer.to() across dtype spellings and devices.

    The original repeated the same dtype/grad/place assertion chunks six
    times; they are factored into private ``_check_*`` helpers with
    identical assertions.
    """

    def funcsetUp(self):
        # Explicit (non-unittest) setup so test_main can rerun it inside and
        # outside the eager guard.
        paddle.disable_static()
        self.linear = paddle.nn.Linear(2, 2)
        self.new_grad = np.random.random([2, 2])
        self.linear.weight._set_grad_ivar(paddle.to_tensor(self.new_grad))
        buffer = paddle.to_tensor([0.0], dtype='float32')
        self.linear.register_buffer("buf_name", buffer, persistable=True)

        sublayer = paddle.nn.Conv1D(3, 2, 3)
        self.linear.add_sublayer("1", sublayer)

    def _check_fp64_state(self):
        # Weight, buffer and gradient must all have been cast to float64,
        # and the gradient values must be preserved.
        fp64 = paddle.fluid.core.VarDesc.VarType.FP64
        self.assertEqual(self.linear.weight.dtype, fp64)
        self.assertEqual(self.linear.buf_name.dtype, fp64)
        np.testing.assert_allclose(
            self.linear.weight.grad.numpy(), self.new_grad, rtol=1e-05
        )
        self.assertEqual(self.linear.weight._grad_ivar().dtype, fp64)

    def _check_param_types(self):
        # Parameters must still be the framework's parameter class after to().
        if in_dygraph_mode():
            expected_type = paddle.fluid.framework.EagerParamBase
        else:
            expected_type = paddle.fluid.framework.ParamBase
        for p in self.linear.parameters():
            self.assertTrue(isinstance(p, expected_type))

    def _check_gpu_state(self):
        # Weight, buffer and gradient all live on GPU device 0.
        self.assertTrue(self.linear.weight.place.is_gpu_place())
        self.assertEqual(self.linear.weight.place.gpu_device_id(), 0)
        self.assertTrue(self.linear.buf_name.place.is_gpu_place())
        self.assertEqual(self.linear.buf_name.place.gpu_device_id(), 0)
        self.assertTrue(self.linear.weight._grad_ivar().place.is_gpu_place())
        self.assertEqual(
            self.linear.weight._grad_ivar().place.gpu_device_id(), 0
        )

    def _check_cpu_state(self):
        # Weight, buffer and gradient all live on the CPU.
        self.assertTrue(self.linear.weight.place.is_cpu_place())
        self.assertTrue(self.linear.buf_name.place.is_cpu_place())
        self.assertTrue(self.linear.weight._grad_ivar().place.is_cpu_place())

    def func_test_to_api(self):
        self.linear.to(dtype='double')
        self._check_fp64_state()

        # to() with no arguments is a no-op; state must be unchanged.
        self.linear.to()
        self._check_fp64_state()
        self._check_param_types()

        if paddle.fluid.is_compiled_with_cuda():
            self.linear.to(device=paddle.CUDAPlace(0))
            self._check_gpu_state()

            self.linear.to(device='gpu:0')
            self._check_gpu_state()
            self._check_param_types()

        self.linear.to(device=paddle.CPUPlace())
        self._check_cpu_state()

        self.linear.to(device='cpu')
        self._check_cpu_state()

        # Invalid device argument types are rejected.
        self.assertRaises(ValueError, self.linear.to, device=1)

        self.assertRaises(AssertionError, self.linear.to, blocking=1)

    def func_test_to_api_paddle_dtype(self):
        self.linear.to(dtype=paddle.float64)
        self._check_fp64_state()

        self.linear.to()
        self._check_fp64_state()
        self._check_param_types()

    def func_test_to_api_numpy_dtype(self):
        self.linear.to(dtype=np.float64)
        self._check_fp64_state()

        self.linear.to()
        self._check_fp64_state()
        self._check_param_types()

    def func_test_to_api_none_buffer(self):
        # A registered None buffer must survive to() untouched.
        model = paddle.nn.Linear(2, 4)
        buffer = None
        model.register_buffer("buf_name", buffer, persistable=True)
        model.to(dtype='float64')
        self.assertIsNone(model._buffers['buf_name'])

    def test_main(self):
        with _test_eager_guard():
            self.funcsetUp()
            self.func_test_to_api()
            self.func_test_to_api_paddle_dtype()
            self.func_test_to_api_numpy_dtype()
            self.func_test_to_api_none_buffer()
        self.funcsetUp()
        self.func_test_to_api()
        self.func_test_to_api_paddle_dtype()
        self.func_test_to_api_numpy_dtype()
        self.func_test_to_api_none_buffer()
628

C
chentianyu03 已提交
629

X
polish  
Xin Pan 已提交
630
if __name__ == '__main__':
    # Tests toggle dynamic mode themselves; start from static mode.
    paddle.enable_static()
    unittest.main()