# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np

import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph import to_variable
from paddle.fluid.framework import ParamBase, EagerParamBase
from paddle.jit import ProgramTranslator
from paddle.fluid.framework import _test_eager_guard, in_dygraph_mode


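# L1, L2 and L3 build a three-level nested Layer hierarchy whose parameters
# are all constant-initialized to 0.1, so the expected forward outputs
# (0.2 for L1, 0.8 for L3) are easy to verify in the tests below.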
class L1(fluid.Layer):
    def __init__(self):
        super(L1, self).__init__()
        self._param_attr = fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.1)
        )
        self.w1 = self.create_parameter(
            attr=self._param_attr, shape=[2, 2], dtype='float32', is_bias=False
        )
        self.w2 = self.create_parameter(
            attr=self._param_attr, shape=[2, 2], dtype='float32', is_bias=False
        )

    def forward(self):
        return self.w1 + self.w2


class L2(fluid.Layer):
    def __init__(self):
        super(L2, self).__init__()
        self.layer1 = L1()
        self.layer2 = L1()

    def forward(self):
        return self.layer1() + self.layer2()


class L3(fluid.Layer):
    def __init__(self):
        super(L3, self).__init__()
        self.layer1 = L2()
        self.layer2 = L2()

    def forward(self):
        return self.layer1() + self.layer2()


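# Checks hierarchical parameter naming via named_parameters(prefix=...),
# the forward results of the nested layers, and add_parameter() error handling.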
class TestBaseLayer(unittest.TestCase):
    def func_test_one_level(self):
        with fluid.dygraph.guard():
            l = L1()
            ret = l()
            expected_names = ['l1.w1', 'l1.w2']
            idx = 0
            for name, _ in l.named_parameters(prefix='l1'):
                self.assertEqual(name, expected_names[idx])
                idx += 1
            np.testing.assert_allclose(
                ret.numpy(), 0.2 * np.ones([2, 2]), rtol=1e-05
            )

    def test_one_level(self):
        with _test_eager_guard():
            self.func_test_one_level()
        self.func_test_one_level()

    def func_test_three_level(self):
        with fluid.dygraph.guard():
            l = L3()
            expected_names = [
                'l3.layer1.layer1.w1',
                'l3.layer1.layer1.w2',
                'l3.layer1.layer2.w1',
                'l3.layer1.layer2.w2',
                'l3.layer2.layer1.w1',
                'l3.layer2.layer1.w2',
                'l3.layer2.layer2.w1',
                'l3.layer2.layer2.w2',
            ]
            idx = 0
            for name, _ in l.named_parameters(prefix='l3'):
                self.assertEqual(name, expected_names[idx])
                idx += 1
            ret = l()
            np.testing.assert_allclose(
                ret.numpy(), 0.8 * np.ones([2, 2]), rtol=1e-05
            )

    def test_three_level(self):
        with _test_eager_guard():
            self.func_test_three_level()
        self.func_test_three_level()

    def func_test_add_parameter_with_error(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            param = net.create_parameter(shape=[1])

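            # add_parameter should reject a non-string name, a name containing
            # '.', an empty name, a name clashing with an existing attribute,
            # and a value that is not a Parameter.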
            with self.assertRaises(TypeError):
                net.add_parameter(10, param)

            with self.assertRaises(KeyError):
                net.add_parameter("param.name", param)

            with self.assertRaises(KeyError):
                net.add_parameter("", param)

            with self.assertRaises(KeyError):
                net.test_param = 10
                net.add_parameter("test_param", param)

            with self.assertRaises(TypeError):
                net.add_parameter("no_param", 10)

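            # Registering a parameter whose name already appears in
            # _loaddict_holder exercises the load path and should not raise.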
            load_param = net.create_parameter(shape=[1])
            net._loaddict_holder[load_param.name] = load_param
            net.add_parameter("load_param", load_param)

    def test_add_parameter_with_error(self):
        with _test_eager_guard():
            self.func_test_add_parameter_with_error()
        self.func_test_add_parameter_with_error()


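# BufferLayer registers a buffer explicitly via register_buffer(); BufferNet
# additionally picks one up implicitly by assigning a Variable attribute
# (new_buffer), which the buffer tests below rely on.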
class BufferLayer(fluid.Layer):
    def __init__(self):
        super(BufferLayer, self).__init__()
        buffer_var = to_variable(np.zeros([2, 4]).astype('int32'))
        self.register_buffer("layer_buffer", buffer_var)

    def forward(self):
        pass


class BufferNet(fluid.Layer):
    def __init__(self):
        super(BufferNet, self).__init__()
        self.buffer_layer = BufferLayer()
        self.w1 = self.create_parameter(
            shape=[2, 2], dtype='float32', is_bias=False
        )
        buffer_var = to_variable(np.ones([2, 4]).astype('int32'))
        self.register_buffer("net_buffer", buffer_var)

        self.new_buffer = to_variable(np.ones([4, 2]).astype('int32'))

    def forward(self):
        pass


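# Covers buffers()/named_buffers() traversal, register_buffer() error handling,
# and how persistable and non-persistable buffers interact with state_dict().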
class TestBuffer(unittest.TestCase):
    def func_test_buffers_and_named_buffers(self):
        def names(named_buffers):
            return [name for name, _ in named_buffers]

        with fluid.dygraph.guard():
            layer = BufferLayer()
            net = BufferNet()

            self.assertEqual(len(layer.buffers()), 1)
            self.assertEqual(names(layer.named_buffers()), ['layer_buffer'])

            self.assertEqual(len(net.buffers()), 3)
            self.assertEqual(
                names(net.named_buffers()),
                ['net_buffer', 'new_buffer', 'buffer_layer.layer_buffer'],
            )

            self.assertEqual(len(net.buffers(include_sublayers=False)), 2)
            self.assertEqual(
                names(net.named_buffers(include_sublayers=False)),
                ['net_buffer', 'new_buffer'],
            )

    def test_buffers_and_named_buffers(self):
        with _test_eager_guard():
            self.func_test_buffers_and_named_buffers()
        self.func_test_buffers_and_named_buffers()

    def func_test_register_buffer_with_error(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var = to_variable(np.zeros([1]))

            with self.assertRaisesRegex(
                TypeError, "name of buffer should be a string"
            ):
                net.register_buffer(12, var)

            with self.assertRaisesRegex(
                TypeError, "buffer should be a Paddle.Tensor"
            ):
                if in_dygraph_mode():
                    net.register_buffer(
                        "buffer_name", EagerParamBase([2, 2], 'float32')
                    )
                else:
                    net.register_buffer(
                        "buffer_name", ParamBase([2, 2], 'float32')
                    )

            with self.assertRaisesRegex(
                KeyError, "name of buffer can not contain"
            ):
                net.register_buffer("buffer.name", var)

            with self.assertRaisesRegex(
                KeyError, "name of buffer can not be empty"
            ):
                net.register_buffer("", var)

            net.attr_name = 10
            with self.assertRaisesRegex(KeyError, "already exists"):
                net.register_buffer("attr_name", var)

            del net.attr_name
            if in_dygraph_mode():
                net.attr_name = EagerParamBase([2, 2], 'float32')
            else:
                net.attr_name = ParamBase([2, 2], 'float32')
            with self.assertRaisesRegex(KeyError, "already exists"):
                net.register_buffer("attr_name", var)

    def test_register_buffer_with_error(self):
        with _test_eager_guard():
            self.func_test_register_buffer_with_error()
        self.func_test_register_buffer_with_error()

    def func_test_register_buffer_same_name(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            var2 = to_variable(np.zeros([2]))
            var3 = to_variable(np.zeros([3]))

            net.register_buffer("buffer_name", var1)
            self.assert_var_base_equal(net.buffer_name, var1)
            net.register_buffer("buffer_name", var2)
            self.assert_var_base_equal(net.buffer_name, var2)
            net.register_buffer("buffer_name", var3)
            self.assert_var_base_equal(net.buffer_name, var3)

    def test_register_buffer_same_name(self):
        with _test_eager_guard():
            self.func_test_register_buffer_same_name()
        self.func_test_register_buffer_same_name()

    def func_test_buffer_not_persistable(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))

            net.register_buffer("buffer_name", var1, persistable=False)
            self.assertEqual(len(net.buffers()), 1)
            self.assertEqual(len(net.state_dict()), 0)

    def test_buffer_not_persistable(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable()
        self.func_test_buffer_not_persistable()

    def func_test_buffer_not_persistable_del(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            net.register_buffer("buffer_name", var1, persistable=False)
            del net.buffer_name
            self.assertEqual(len(net.buffers()), 0)

    def test_buffer_not_persistable_del(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable_del()
        self.func_test_buffer_not_persistable_del()

    def func_test_buffer_not_persistable_overwrite(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            var2 = to_variable(np.zeros([2]))
            net.register_buffer("buffer_name", var1, persistable=False)
            net.register_buffer("buffer_name", var2)

            # Overwriting a non-persistable buffer with a persistable variable is allowed.
            self.assertEqual(len(net.buffers()), 1)
            self.assertEqual(len(net.state_dict()), 1)

            net.register_buffer("buffer_name", var1, persistable=False)
            self.assertEqual(len(net.buffers()), 1)
            self.assertEqual(len(net.state_dict()), 0)

    def test_buffer_not_persistable_overwrite(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable_overwrite()
        self.func_test_buffer_not_persistable_overwrite()

    def func_test_buffer_not_persistable_assign(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            net.register_buffer("buffer_name", var1, persistable=False)

            # Assigning None removes the buffer, but the attribute can be
            # re-assigned later to register it as a buffer again.
            net.buffer_name = None
            self.assertEqual(len(net.buffers()), 0)
            self.assertEqual(len(net.state_dict()), 0)

            net.buffer_name = var1
            self.assertEqual(len(net.buffers()), 1)
            self.assertEqual(len(net.state_dict()), 0)

            # Re-assigning a ParamBase removes the buffer.
            if in_dygraph_mode():
                net.buffer_name = EagerParamBase([2, 2], 'float32')
            else:
                net.buffer_name = ParamBase([2, 2], 'float32')
            self.assertEqual(len(net.buffers()), 0)
            self.assertEqual(len(net.state_dict()), 1)

    def test_buffer_not_persistable_assign(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable_assign()
        self.func_test_buffer_not_persistable_assign()

    def func_test_buffer_not_persistable_load(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([1]))
            net.register_buffer("buffer_name", var1, persistable=False)
            net.load_dict({})

    def test_buffer_not_persistable_load(self):
        with _test_eager_guard():
            self.func_test_buffer_not_persistable_load()
        self.func_test_buffer_not_persistable_load()

    def func_test_buffer_state_dict(self):
        with fluid.dygraph.guard():
            net = fluid.Layer()
            var1 = to_variable(np.zeros([2, 3]))
            var2 = to_variable(np.zeros([3, 2]))
            net.register_buffer("buffer_var1", var1)
            net.register_buffer("buffer_var2", var2, persistable=False)

            self.assertEqual(len(net.state_dict()), 1)
            self.assertEqual(
                [name for name, _ in net.state_dict().items()], ["buffer_var1"]
            )

            # load state_dict
            net_load = fluid.Layer()
            var = to_variable(np.ones([2, 3]))
            net_load.register_buffer("buffer_var1", var)
            net_load.load_dict(net.state_dict())

            self.assert_var_base_equal(net_load.buffer_var1, var1)

    def test_buffer_state_dict(self):
        with _test_eager_guard():
            self.func_test_buffer_state_dict()
        self.func_test_buffer_state_dict()

    def assert_var_base_equal(self, var1, var2):
        np.testing.assert_array_equal(var1.numpy(), var2.numpy())


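# BufferNetWithModification mutates its buffers inside a to_static forward;
# TestModifiedBuffer checks that dygraph and static-graph runs produce the
# same outputs and buffer values.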
class BufferNetWithModification(paddle.nn.Layer):
    def __init__(self, shape):
        super(BufferNetWithModification, self).__init__()

        self.buffer1 = paddle.zeros(shape, 'int32')
        self.buffer2 = paddle.zeros(shape, 'int32')

    @paddle.jit.to_static
    def forward(self, x):
        self.buffer1 += x
        self.buffer2 = self.buffer1 + x

        out = self.buffer1 + self.buffer2

        return out


class TestModifiedBuffer(unittest.TestCase):
    def funcsetUp(self):
        paddle.disable_static()
        self.prog_trans = ProgramTranslator()
        self.shape = [10, 16]

    def _run(self, to_static=False):
        self.prog_trans.enable(to_static)

        x = paddle.ones([1], 'int32')
        net = BufferNetWithModification(self.shape)
        out = net(x)

        return out, net.buffer1, net.buffer2

W
wanghuancoder 已提交
    def func_test_modified(self):
        self.funcsetUp()
        dy_outs = self._run(False)
        st_outs = self._run(True)

        for i in range(len(dy_outs)):
            np.testing.assert_array_equal(
                dy_outs[i].numpy(), st_outs[i].numpy()
            )

    def test_modified(self):
        with _test_eager_guard():
            self.func_test_modified()
        self.func_test_modified()


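# TestLayerTo exercises Layer.to() for dtype casting (string, paddle and numpy
# dtypes), device placement, and the corresponding updates to gradients and
# buffers.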
class TestLayerTo(unittest.TestCase):
    def funcsetUp(self):
        paddle.disable_static()
        self.linear = paddle.nn.Linear(2, 2)
        self.new_grad = np.random.random([2, 2])
        self.linear.weight._set_grad_ivar(paddle.to_tensor(self.new_grad))
        buffer = paddle.to_tensor([0.0], dtype='float32')
        self.linear.register_buffer("buf_name", buffer, persistable=True)

        sublayer = paddle.nn.Conv1D(3, 2, 3)
440
        self.linear.add_sublayer("1", sublayer)

    def func_test_to_api(self):
        self.linear.to(dtype='double')
        self.assertEqual(
            self.linear.weight.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        self.assertEqual(
            self.linear.buf_name.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        np.testing.assert_allclose(
            self.linear.weight.grad.numpy(), self.new_grad, rtol=1e-05
        )
        self.assertEqual(
            self.linear.weight._grad_ivar().dtype,
            paddle.fluid.core.VarDesc.VarType.FP64,
        )

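        # to() with no arguments should be a no-op; dtype stays float64.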
        self.linear.to()
        self.assertEqual(
            self.linear.weight.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        self.assertEqual(
            self.linear.buf_name.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        np.testing.assert_allclose(
            self.linear.weight.grad.numpy(), self.new_grad, rtol=1e-05
        )
        self.assertEqual(
            self.linear.weight._grad_ivar().dtype,
            paddle.fluid.core.VarDesc.VarType.FP64,
        )
        for p in self.linear.parameters():
            if in_dygraph_mode():
                self.assertTrue(
                    isinstance(p, paddle.fluid.framework.EagerParamBase)
                )
            else:
                self.assertTrue(isinstance(p, paddle.fluid.framework.ParamBase))

        if paddle.fluid.is_compiled_with_cuda():
            self.linear.to(device=paddle.CUDAPlace(0))
            self.assertTrue(self.linear.weight.place.is_gpu_place())
            self.assertEqual(self.linear.weight.place.gpu_device_id(), 0)
            self.assertTrue(self.linear.buf_name.place.is_gpu_place())
            self.assertEqual(self.linear.buf_name.place.gpu_device_id(), 0)
            self.assertTrue(
                self.linear.weight._grad_ivar().place.is_gpu_place()
            )
            self.assertEqual(
                self.linear.weight._grad_ivar().place.gpu_device_id(), 0
            )

            self.linear.to(device='gpu:0')
            self.assertTrue(self.linear.weight.place.is_gpu_place())
            self.assertEqual(self.linear.weight.place.gpu_device_id(), 0)
            self.assertTrue(self.linear.buf_name.place.is_gpu_place())
            self.assertEqual(self.linear.buf_name.place.gpu_device_id(), 0)
            self.assertTrue(
                self.linear.weight._grad_ivar().place.is_gpu_place()
            )
            self.assertEqual(
                self.linear.weight._grad_ivar().place.gpu_device_id(), 0
            )
            for p in self.linear.parameters():
                if in_dygraph_mode():
                    self.assertTrue(
                        isinstance(p, paddle.fluid.framework.EagerParamBase)
                    )
                else:
                    self.assertTrue(
                        isinstance(p, paddle.fluid.framework.ParamBase)
                    )

        self.linear.to(device=paddle.CPUPlace())
        self.assertTrue(self.linear.weight.place.is_cpu_place())
        self.assertTrue(self.linear.buf_name.place.is_cpu_place())
        self.assertTrue(self.linear.weight._grad_ivar().place.is_cpu_place())

        self.linear.to(device='cpu')
        self.assertTrue(self.linear.weight.place.is_cpu_place())
        self.assertTrue(self.linear.buf_name.place.is_cpu_place())
        self.assertTrue(self.linear.weight._grad_ivar().place.is_cpu_place())

        self.assertRaises(ValueError, self.linear.to, device=1)

        self.assertRaises(AssertionError, self.linear.to, blocking=1)

    def func_test_to_api_paddle_dtype(self):
        self.linear.to(dtype=paddle.float64)
        self.assertEqual(
            self.linear.weight.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        self.assertEqual(
            self.linear.buf_name.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        np.testing.assert_allclose(
            self.linear.weight.grad.numpy(), self.new_grad, rtol=1e-05
        )
        self.assertEqual(
            self.linear.weight._grad_ivar().dtype,
            paddle.fluid.core.VarDesc.VarType.FP64,
        )

        self.linear.to()
        self.assertEqual(
            self.linear.weight.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        self.assertEqual(
            self.linear.buf_name.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        np.testing.assert_allclose(
            self.linear.weight.grad.numpy(), self.new_grad, rtol=1e-05
        )
        self.assertEqual(
            self.linear.weight._grad_ivar().dtype,
            paddle.fluid.core.VarDesc.VarType.FP64,
        )
        for p in self.linear.parameters():
            if in_dygraph_mode():
                self.assertTrue(
                    isinstance(p, paddle.fluid.framework.EagerParamBase)
                )
            else:
                self.assertTrue(isinstance(p, paddle.fluid.framework.ParamBase))

    def func_test_to_api_numpy_dtype(self):
        self.linear.to(dtype=np.float64)
        self.assertEqual(
            self.linear.weight.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        self.assertEqual(
            self.linear.buf_name.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        np.testing.assert_allclose(
            self.linear.weight.grad.numpy(), self.new_grad, rtol=1e-05
        )
        self.assertEqual(
            self.linear.weight._grad_ivar().dtype,
            paddle.fluid.core.VarDesc.VarType.FP64,
        )

        self.linear.to()
        self.assertEqual(
            self.linear.weight.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        self.assertEqual(
            self.linear.buf_name.dtype, paddle.fluid.core.VarDesc.VarType.FP64
        )
        np.testing.assert_allclose(
            self.linear.weight.grad.numpy(), self.new_grad, rtol=1e-05
        )
        self.assertEqual(
            self.linear.weight._grad_ivar().dtype,
            paddle.fluid.core.VarDesc.VarType.FP64,
        )
        for p in self.linear.parameters():
            if in_dygraph_mode():
                self.assertTrue(
                    isinstance(p, paddle.fluid.framework.EagerParamBase)
                )
            else:
                self.assertTrue(isinstance(p, paddle.fluid.framework.ParamBase))

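    # A buffer registered as None should pass through to() unchanged.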
    def func_test_to_api_none_buffer(self):
        model = paddle.nn.Linear(2, 4)
        buffer = None
        model.register_buffer("buf_name", buffer, persistable=True)
        model.to(dtype='float64')
        self.assertEqual(model._buffers['buf_name'], None)

    def test_main(self):
        with _test_eager_guard():
            self.funcsetUp()
            self.func_test_to_api()
            self.func_test_to_api_paddle_dtype()
            self.func_test_to_api_numpy_dtype()
            self.func_test_to_api_none_buffer()
        self.funcsetUp()
        self.func_test_to_api()
        self.func_test_to_api_paddle_dtype()
        self.func_test_to_api_numpy_dtype()
        self.func_test_to_api_none_buffer()


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()