#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
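
# Tests for saving and loading static-graph programs: fluid.save/fluid.load,
# load_program_state/set_program_state, and the legacy save_persistables
# interface, all exercised on a small PTB LSTM language model.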


import errno
import os
import pickle
import tempfile
import unittest

import numpy as np
from test_imperative_base import new_program_scope

import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.fluid.framework as framework
from paddle.fluid.optimizer import Adam

paddle.enable_static()


class SimpleLSTMRNN(fluid.Layer):
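    # A stacked LSTM written with primitive ops (slice, matmul, add, split) so
    # that every weight and bias is an explicitly created Parameter.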
    def __init__(
        self,
        name_scope,
        hidden_size,
        num_steps,
        num_layers=2,
        init_scale=0.1,
        dropout=None,
    ):
        super().__init__()
        self._hidden_size = hidden_size
        self._num_layers = num_layers
        self._init_scale = init_scale
        self._dropout = dropout
        self._input = None
        self._num_steps = num_steps
        self.cell_array = []
        self.hidden_array = []

        self.weight_1_arr = []
        self.weight_2_arr = []
        self.bias_arr = []
        self.mask_array = []

        for i in range(self._num_layers):
            weight_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale
                    )
                ),
                shape=[self._hidden_size * 2, self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.UniformInitializer(
                    low=-self._init_scale, high=self._init_scale
                ),
            )
            self.weight_1_arr.append(self.add_parameter('w_%d' % i, weight_1))
            bias_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale
                    )
                ),
                shape=[self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.Constant(0.0),
            )
            self.bias_arr.append(self.add_parameter('b_%d' % i, bias_1))

    def forward(self, input_embedding, init_hidden=None, init_cell=None):
        self.cell_array = []
        self.hidden_array = []

        for i in range(self._num_layers):
            pre_hidden = paddle.slice(
                init_hidden, axes=[0], starts=[i], ends=[i + 1]
            )
            pre_cell = paddle.slice(
                init_cell, axes=[0], starts=[i], ends=[i + 1]
            )
            pre_hidden = paddle.reshape(
                pre_hidden, shape=[-1, self._hidden_size]
            )
            pre_cell = paddle.reshape(pre_cell, shape=[-1, self._hidden_size])
            self.hidden_array.append(pre_hidden)
            self.cell_array.append(pre_cell)

        res = []
        for index in range(self._num_steps):
            self._input = paddle.slice(
                input_embedding, axes=[1], starts=[index], ends=[index + 1]
            )
            self._input = paddle.reshape(
                self._input, shape=[-1, self._hidden_size]
            )
            for k in range(self._num_layers):
                pre_hidden = self.hidden_array[k]
                pre_cell = self.cell_array[k]
                weight_1 = self.weight_1_arr[k]
                bias = self.bias_arr[k]

                nn = fluid.layers.concat([self._input, pre_hidden], 1)
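                # Fused gate projection: [x_t, h_{t-1}] @ W gives 4 * hidden
                # units, split below into input (i), candidate (j), forget (f)
                # and output (o) gates.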
                gate_input = paddle.matmul(x=nn, y=weight_1)

                gate_input = paddle.add(gate_input, bias)
                i, j, f, o = paddle.split(
                    gate_input, num_or_sections=4, axis=-1
                )
                c = pre_cell * paddle.nn.functional.sigmoid(
                    f
                ) + paddle.nn.functional.sigmoid(i) * paddle.tanh(j)
                m = paddle.tanh(c) * paddle.nn.functional.sigmoid(o)
                self.hidden_array[k] = m
                self.cell_array[k] = c
                self._input = m

                if self._dropout is not None and self._dropout > 0.0:
                    self._input = paddle.nn.functional.dropout(
                        self._input,
                        p=self._dropout,
                        mode='upscale_in_train',
                    )
            res.append(
                paddle.reshape(self._input, shape=[1, -1, self._hidden_size])
            )
        real_res = fluid.layers.concat(res, 0)
        real_res = paddle.transpose(x=real_res, perm=[1, 0, 2])
        last_hidden = fluid.layers.concat(self.hidden_array, 1)
        last_hidden = paddle.reshape(
            last_hidden, shape=[-1, self._num_layers, self._hidden_size]
        )
        last_hidden = paddle.transpose(x=last_hidden, perm=[1, 0, 2])
        last_cell = fluid.layers.concat(self.cell_array, 1)
        last_cell = paddle.reshape(
            last_cell, shape=[-1, self._num_layers, self._hidden_size]
        )
        last_cell = paddle.transpose(x=last_cell, perm=[1, 0, 2])
        return real_res, last_hidden, last_cell


class PtbModel(fluid.Layer):
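    # PTB language model: embedding -> SimpleLSTMRNN -> softmax projection,
    # with a summed per-step cross-entropy loss.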
    def __init__(
        self,
        name_scope,
        hidden_size,
        vocab_size,
        num_layers=2,
        num_steps=20,
        init_scale=0.1,
        dropout=None,
    ):
        super().__init__()
        self.hidden_size = hidden_size
        self.vocab_size = vocab_size
        self.init_scale = init_scale
        self.num_layers = num_layers
        self.num_steps = num_steps
        self.dropout = dropout
        self.simple_lstm_rnn = SimpleLSTMRNN(
            self.full_name(),
            hidden_size,
            num_steps,
            num_layers=num_layers,
            init_scale=init_scale,
            dropout=dropout,
        )
        self.embedding = paddle.nn.Embedding(
            num_embeddings=vocab_size,
            embedding_dim=hidden_size,
            weight_attr=fluid.ParamAttr(
                name='embedding_para',
                initializer=fluid.initializer.UniformInitializer(
                    low=-init_scale, high=init_scale
                ),
            ),
        )
        self.softmax_weight = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.hidden_size, self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale
            ),
        )
        self.softmax_bias = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale
            ),
        )

    def forward(self, input, label, init_hidden, init_cell):
        init_h = paddle.reshape(
            init_hidden, shape=[self.num_layers, -1, self.hidden_size]
        )

        init_c = paddle.reshape(
            init_cell, shape=[self.num_layers, -1, self.hidden_size]
        )

        # The NPU 'top_k' kernel only supports the `int32` dtype, so cast `input` from `int64` to `int32`.
        input = fluid.layers.cast(input, "int32")
        x_emb = self.embedding(input)
        x_emb = paddle.reshape(
            x_emb, shape=[-1, self.num_steps, self.hidden_size]
        )
        if self.dropout is not None and self.dropout > 0.0:
            x_emb = paddle.nn.functional.dropout(
                x_emb,
                p=self.dropout,
                mode='upscale_in_train',
            )
        rnn_out, last_hidden, last_cell = self.simple_lstm_rnn(
            x_emb, init_h, init_c
        )

        rnn_out = paddle.reshape(
            rnn_out, shape=[-1, self.num_steps, self.hidden_size]
        )
        projection = paddle.matmul(rnn_out, self.softmax_weight)
        projection = paddle.add(projection, self.softmax_bias)
        projection = paddle.reshape(projection, shape=[-1, self.vocab_size])
        loss = paddle.nn.functional.softmax_with_cross_entropy(
            logits=projection, label=label, soft_label=False
        )
        loss = paddle.reshape(loss, shape=[-1, self.num_steps])
        loss = paddle.mean(loss, axis=[0])
        loss = paddle.sum(loss)

        return loss, last_hidden, last_cell


class TestSaveLoadBase(unittest.TestCase):
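    # Round trip through fluid.save/fluid.load: train a few steps, snapshot all
    # persistable vars, zero them in the scope, reload, and compare exactly.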
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program,
                os.path.join(temp_dir.name, "test_1.pdparams"),
                exe,
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestSaveLoadPartial(unittest.TestCase):
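    # Saves the full training program, then loads it back into a pruned
    # test-mode clone; the .pdopt and .pdmodel file names are accepted too.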
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = paddle.static.nn.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                test_program, os.path.join(temp_dir.name, "test_1.pdopt"), None
            )

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            fluid.load(
                test_program,
                os.path.join(temp_dir.name, "test_1.pdmodel"),
                None,
            )
            temp_dir.cleanup()


class TestSaveLoadSetStateDict(unittest.TestCase):
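    # Same round trip as TestSaveLoadBase, but fluid.load is given the bare
    # checkpoint prefix rather than an explicit .pdparams file name.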
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(main_program, os.path.join(temp_dir.name, "test_1"), exe)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestProgramStatePartial(unittest.TestCase):
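    # Round trip through load_program_state/set_program_state; the state can be
    # located via the bare prefix or any of the .pdparams/.pdopt/.pdmodel names.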
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = paddle.static.nn.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, 'test_1'))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # fluid.load(test_program, "./test_1", None )
            program_state = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1')
            )

            program_state_1 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdparams')
            )

            program_state_2 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdopt')
            )

            program_state_3 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdmodel')
            )

            fluid.set_program_state(test_program, program_state)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 1
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_1)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 2
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_2)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 3
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_3)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestVariableInit(unittest.TestCase):
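    # Materializes saved variables into a fresh Scope by hand:
    # _create_loaded_parameter creates the variables, then their tensors are
    # filled from the pickled .pdparams/.pdopt files.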
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_variable_init(self):

        x = fluid.data(name="x", shape=[10, 10], dtype='float32')
        y = paddle.static.nn.fc(x, 10)
        z = paddle.static.nn.fc(y, 10)

        place = self.set_place()
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())

        temp_dir = tempfile.TemporaryDirectory()
        fluid.save(
            fluid.default_main_program(),
            os.path.join(temp_dir.name, "test_path"),
        )

        def set_var(var, ndarray):
            t = var.get_tensor()
            p = t._place()
            if p.is_cpu_place():
                place = paddle.fluid.CPUPlace()
            elif p.is_cuda_pinned_place():
                place = paddle.fluid.CUDAPinnedPlace()
            else:
                p = paddle.fluid.core.Place()
                p.set_place(t._place())
                place = paddle.fluid.CUDAPlace(p.gpu_device_id())

            t.set(ndarray, place)

        program = fluid.default_main_program()
        new_scope = fluid.core.Scope()

        place = self.set_place()
        exe = fluid.Executor(place)
        parameter_list = list(
            filter(fluid.io.is_parameter, program.list_vars())
        )

        fluid.core._create_loaded_parameter(
            parameter_list, new_scope, exe._default_executor
        )
        parameter_file_name = os.path.join(temp_dir.name, "test_path.pdparams")
        with open(parameter_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in parameter_list:
            assert (
                v.name in load_dict
            ), "Can not find [{}] in model file [{}]".format(
                v.name, parameter_file_name
            )
            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        opt_list = list(
            filter(fluid.io.is_belong_to_optimizer, program.list_vars())
        )

        fluid.core._create_loaded_parameter(
            opt_list, new_scope, exe._default_executor
        )
        opt_file_name = os.path.join(temp_dir.name, "test_path.pdopt")
        with open(opt_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in opt_list:
            assert (
                v.name in load_dict
            ), "Can not find [{}] in model file [{}]".format(
                v.name, opt_file_name
            )

            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        base_map = {}
        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                t = np.array(
                    fluid.global_scope().find_var(var.name).get_tensor()
                )
                # make sure all the parameter and optimizer vars have been updated
                base_map[var.name] = t

        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(new_scope.find_var(var.name).get_tensor())
                base_t = base_map[var.name]

                np.testing.assert_array_equal(new_t, base_t)
        temp_dir.cleanup()


class TestLoadFromOldInterface(unittest.TestCase):
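    # Checkpoints written with the legacy save_persistables interface must stay
    # readable by fluid.load, including loading only a subset of vars via
    # var_list; a changed variable shape must raise RuntimeError.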
    def setUp(self):
        if os.path.exists("test_path.pdparams"):
            os.remove("test_path.pdparams")

        if os.path.exists("test_static_load_var_list.pdparams"):
            os.remove("test_static_load_var_list.pdparams")

        self.temp_dir = tempfile.TemporaryDirectory()

    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            # fluid.save(main_program, "./test_1")
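            # Save with the legacy save_persistables interface; fluid.load
            # below must be able to read it back.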
            paddle.distributed.io.save_persistables(
                exe, os.path.join(self.temp_dir.name, "test_path"), main_program
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program, os.path.join(self.temp_dir.name, "test_path"), exe
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)
            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    os.path.join(self.temp_dir.name, "test_path"),
                    exe,
                )

            # check loading into a program that still contains unused parameters

            fluid.load(
                test_clone_program,
                os.path.join(self.temp_dir.name, "test_path"),
                exe,
            )

    def test_load_from_old_interface_var_list(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)
            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                main_program,
            )

            # set var to zero
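            # While zeroing, collect every other persistable var into var_list
            # so that only this subset is reloaded below.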
            var_list = []
            for i, var in enumerate(main_program.list_vars()):
                if isinstance(var, framework.Parameter) or var.persistable:
                    if i % 2 == 0:
                        var_list.append(var)
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                exe,
                var_list,
            )
            var_list_names = [var.name for var in var_list]
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    if var.name in var_list_names:
                        # loaded vars
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)
                    else:
                        # not loaded vars
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)


class TestLoadFromOldInterfaceSingleFile(unittest.TestCase):
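    # Variant of the legacy-interface test that saves all persistables into a
    # single file.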
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
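            # skip the executor's feed shape/dtype check; the numpy feeds below
            # are reshaped on the fly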
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            save_dir = os.path.join(temp_dir.name, "test_path")
            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe, save_dir, main_program, filename="model_single"
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            file_model_path = os.path.join(save_dir, "model_single")
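            # reload everything from the single-file checkpoint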
            fluid.load(
                main_program,
                file_model_path,
                exe,
                fluid.io.get_program_persistable_vars(main_program),
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
1393
                    np.testing.assert_array_equal(new_t, base_t)

            # test exception: enlarge every variable's shape so that loading
            # the saved tensors fails with a shape mismatch
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)

            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # overwrite the checkpoint with a params-only file, so loading the
            # full persistable list below must fail
            fluid.io.save_params(
                exe, save_dir, main_program, filename="model_single"
            )
            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # check when executor is None
            with self.assertRaises(ValueError):
                fluid.load(
                    main_program,
                    file_model_path,
                    None,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # check when var list is None
            with self.assertRaises(ValueError):
                fluid.load(main_program, file_model_path, exe, None)

            # check that loading a var_list containing a variable missing from
            # the saved file raises an error
            with self.assertRaises(RuntimeError):
                temp_var = framework.Variable(
                    main_program.global_block(), shape=[1], name="test_temp_var"
                )
                all_var_list = list(main_program.list_vars())
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    all_var_list + [temp_var],
                )
        temp_dir.cleanup()


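# Saves through the legacy save_persistables interface, then restores via
# fluid.load_program_state / fluid.set_program_state, in static and dynamic
# graph mode.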
class TestProgramStateOldSave(unittest.TestCase):
    def setUp(self):
        self.test_dygraph = True
        self.temp_dir = tempfile.TemporaryDirectory()

    def tearDown(self):
        self.temp_dir.cleanup()

    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = paddle.static.nn.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            save_dir = os.path.join(self.temp_dir.name, "test_program_1")
            paddle.distributed.io.save_persistables(exe, save_dir, main_program)

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # case 1: basic load of the full program state
            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 2: load with an unneeded extra file in the save directory
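            # (re)create a symlink, replacing an existing link if necessary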
            def symlink_force(target, link_name):
                try:
                    self.create_symlink(target, link_name)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        os.remove(link_name)
                        self.create_symlink(target, link_name)
                    else:
                        raise e

            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 3: load with var_list
            program_state = fluid.load_program_state(
                save_dir, main_program.all_parameters()
            )
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

        if self.test_dygraph:
            # make sure `load_program_state` can be used in dynamic graph mode
            with fluid.dygraph.guard(place):
                load_state = fluid.load_program_state(save_dir)
                for k, v in load_state.items():
                    np.testing.assert_array_equal(base_map[k], v)

    def create_symlink(self, target, link_name):
        try:
            os.symlink(target, link_name)
        except AttributeError:
            import ctypes

            kernel_dll = ctypes.windll.LoadLibrary("kernel32.dll")
            kernel_dll.CreateSymbolicLinkA(target, link_name, 0)

    def check_in_static(self, main_program, base_map):
        for var in main_program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(
                    fluid.global_scope().find_var(var.name).get_tensor()
                )
                base_t = base_map[var.name]
                np.testing.assert_array_equal(new_t, base_t)


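# Same scenario as above, but the whole program state sits in one file, so
# load_program_state needs an explicit var_list.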
class TestProgramStateOldSaveSingleModel(unittest.TestCase):
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = paddle.static.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            x.desc.set_need_check_feed(False)
            y = paddle.static.data(name="y", shape=[-1, 1], dtype='float32')
            y.desc.set_need_check_feed(False)
            init_hidden = paddle.static.data(
                name="init_hidden", shape=[-1, 1], dtype='float32'
            )
            init_hidden.desc.set_need_check_feed(False)
            init_cell = paddle.static.data(
                name="init_cell", shape=[-1, 1], dtype='float32'
            )
            init_cell.desc.set_need_check_feed(False)

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = paddle.static.nn.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            save_dir = os.path.join(temp_dir.name, "test_program_2")
            paddle.distributed.io.save_persistables(
                exe, save_dir, main_program, filename="model_1"
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # fluid.load(test_program, "./test_1", None )
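            # a single-file program state can only be restored with an explicit var_list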
            program_state = fluid.load_program_state(
                os.path.join(save_dir, "model_1"),
                var_list=fluid.io.get_program_persistable_vars(main_program),
            )
            fluid.set_program_state(main_program, program_state)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            with self.assertRaises(ValueError):
                fluid.load_program_state(os.path.join(save_dir, "model_1"))

            with self.assertRaises(TypeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"), var_list=["str"]
                )

            with self.assertRaises(RuntimeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"),
                    var_list=[
                        main_program.global_block().create_var(
                            name="fake_var_name", persistable=True
                        )
                    ],
                )
        temp_dir.cleanup()


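# paddle.fluid.save must reject invalid pickle protocols and round-trip
# correctly with every supported one (2, 3 and 4).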
class TestStaticSaveLoadPickle(unittest.TestCase):
    def test_pickle_protocol(self):
        # enable static graph mode
        paddle.enable_static()

        with new_program_scope():
            # create network
            x = paddle.static.data(
                name="static_save_load_large_x",
                shape=[None, 10],
                dtype='float32',
            )
            x.desc.set_need_check_feed(False)
            z = paddle.static.nn.fc(x, 10, bias_attr=False)
            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            exe.run(paddle.static.default_startup_program())
            prog = paddle.static.default_main_program()

            base_map = {}
            for var in prog.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            temp_dir = tempfile.TemporaryDirectory()
            path = os.path.join(
                temp_dir.name, "test_static_save_load_pickle", "pickle_protocol"
            )

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 2.0)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 1)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 5)

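            # only pickle protocols 2-4 are accepted; round-trip each of them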
            protocols = [2, 3, 4]
            for protocol in protocols:
                paddle.fluid.save(prog, path, protocol)
                # set var to zero
                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        ten = (
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        ten.set(np.zeros_like(np.array(ten)), place)

                        new_t = np.array(
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)

                paddle.fluid.load(prog, path)

                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        new_t = np.array(
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)


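# save_inference_model / load_inference_model round trip for a program
# without any parameters.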
class TestSaveLoadInferenceModel(unittest.TestCase):
    def setUp(self):
        self.temp_dir = tempfile.TemporaryDirectory()
        self.model_path = os.path.join(self.temp_dir.name, 'no_params')

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_no_params(self):
        main_program = framework.Program()
        with framework.program_guard(main_program):
            x = paddle.static.data(name="x", shape=[10, 10], dtype='float32')
            x.desc.set_need_check_feed(False)
            y = x + x

            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)

            paddle.static.save_inference_model(self.model_path, [x], [y], exe)

            [
                inference_program,
                feed_target_names,
                fetch_targets,
            ] = paddle.static.load_inference_model(self.model_path, exe)

            self.assertEqual(feed_target_names, ['x'])
            self.assertEqual(fetch_targets[0].shape, (10, 10))
            ops = [op.type for op in inference_program.block(0).ops]
            self.assertEqual(ops, ['feed', 'elementwise_add', 'scale', 'fetch'])


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()