#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import unittest
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.fluid.framework as framework
from paddle.fluid.optimizer import Adam
from test_imperative_base import new_program_scope
import numpy as np
import pickle
import os
import errno
import tempfile

paddle.enable_static()
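
# These tests build a small PTB LSTM language model in static-graph mode,
# run a few training steps, and then exercise fluid.save / fluid.load and
# the program-state APIs by zeroing every persistable variable and checking
# that loading restores the exact pre-save values.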


class SimpleLSTMRNN(fluid.Layer):
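    # A stacked LSTM written out with explicit per-layer weights and
    # per-timestep gate computations (no fused RNN op).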
    def __init__(
        self,
        name_scope,
        hidden_size,
        num_steps,
        num_layers=2,
        init_scale=0.1,
        dropout=None,
    ):
        super().__init__()
        self._hidden_size = hidden_size
        self._num_layers = num_layers
        self._init_scale = init_scale
        self._dropout = dropout
        self._input = None
        self._num_steps = num_steps
        self.cell_array = []
        self.hidden_array = []

        self.weight_1_arr = []
        self.weight_2_arr = []
        self.bias_arr = []
        self.mask_array = []

        for i in range(self._num_layers):
            weight_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale
                    )
                ),
                shape=[self._hidden_size * 2, self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.UniformInitializer(
                    low=-self._init_scale, high=self._init_scale
                ),
            )
            self.weight_1_arr.append(self.add_parameter('w_%d' % i, weight_1))
            bias_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale
                    )
                ),
                shape=[self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.Constant(0.0),
            )
            self.bias_arr.append(self.add_parameter('b_%d' % i, bias_1))

    def forward(self, input_embedding, init_hidden=None, init_cell=None):
        self.cell_array = []
        self.hidden_array = []

        for i in range(self._num_layers):
            pre_hidden = paddle.slice(
                init_hidden, axes=[0], starts=[i], ends=[i + 1]
            )
            pre_cell = paddle.slice(
                init_cell, axes=[0], starts=[i], ends=[i + 1]
            )
            pre_hidden = paddle.reshape(
                pre_hidden, shape=[-1, self._hidden_size]
            )
            pre_cell = paddle.reshape(pre_cell, shape=[-1, self._hidden_size])
            self.hidden_array.append(pre_hidden)
            self.cell_array.append(pre_cell)

        res = []
        for index in range(self._num_steps):
            self._input = paddle.slice(
                input_embedding, axes=[1], starts=[index], ends=[index + 1]
            )
            self._input = paddle.reshape(
                self._input, shape=[-1, self._hidden_size]
            )
            for k in range(self._num_layers):
                pre_hidden = self.hidden_array[k]
                pre_cell = self.cell_array[k]
                weight_1 = self.weight_1_arr[k]
                bias = self.bias_arr[k]

                nn = fluid.layers.concat([self._input, pre_hidden], 1)
                gate_input = fluid.layers.matmul(x=nn, y=weight_1)

                gate_input = fluid.layers.elementwise_add(gate_input, bias)
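                # split the fused pre-activations into input (i), candidate
                # (j), forget (f) and output (o) gates, then apply the
                # standard LSTM cell- and hidden-state updates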
                i, j, f, o = fluid.layers.split(
                    gate_input, num_or_sections=4, dim=-1
                )
                c = pre_cell * paddle.nn.functional.sigmoid(
                    f
                ) + paddle.nn.functional.sigmoid(i) * paddle.tanh(j)
                m = paddle.tanh(c) * paddle.nn.functional.sigmoid(o)
                self.hidden_array[k] = m
                self.cell_array[k] = c
                self._input = m

                if self._dropout is not None and self._dropout > 0.0:
                    self._input = fluid.layers.dropout(
                        self._input,
                        dropout_prob=self._dropout,
                        dropout_implementation='upscale_in_train',
                    )
            res.append(
                paddle.reshape(self._input, shape=[1, -1, self._hidden_size])
            )
        real_res = fluid.layers.concat(res, 0)
        real_res = paddle.transpose(x=real_res, perm=[1, 0, 2])
        last_hidden = fluid.layers.concat(self.hidden_array, 1)
        last_hidden = paddle.reshape(
            last_hidden, shape=[-1, self._num_layers, self._hidden_size]
        )
        last_hidden = paddle.transpose(x=last_hidden, perm=[1, 0, 2])
        last_cell = fluid.layers.concat(self.cell_array, 1)
        last_cell = paddle.reshape(
            last_cell, shape=[-1, self._num_layers, self._hidden_size]
        )
        last_cell = paddle.transpose(x=last_cell, perm=[1, 0, 2])
        return real_res, last_hidden, last_cell


class PtbModel(fluid.Layer):
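    # PTB word-level language model: embedding -> stacked LSTM -> softmax
    # projection, returning the per-step mean cross-entropy loss summed
    # over the time steps.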
    def __init__(
        self,
        name_scope,
        hidden_size,
        vocab_size,
        num_layers=2,
        num_steps=20,
        init_scale=0.1,
        dropout=None,
    ):
        super().__init__()
        self.hidden_size = hidden_size
        self.vocab_size = vocab_size
        self.init_scale = init_scale
        self.num_layers = num_layers
        self.num_steps = num_steps
        self.dropout = dropout
        self.simple_lstm_rnn = SimpleLSTMRNN(
            self.full_name(),
            hidden_size,
            num_steps,
            num_layers=num_layers,
            init_scale=init_scale,
            dropout=dropout,
        )
        self.embedding = paddle.nn.Embedding(
            num_embeddings=vocab_size,
            embedding_dim=hidden_size,
            weight_attr=fluid.ParamAttr(
                name='embedding_para',
                initializer=fluid.initializer.UniformInitializer(
                    low=-init_scale, high=init_scale
                ),
            ),
        )
        self.softmax_weight = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.hidden_size, self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale
            ),
        )
        self.softmax_bias = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale
            ),
        )

    def forward(self, input, label, init_hidden, init_cell):
        init_h = paddle.reshape(
            init_hidden, shape=[self.num_layers, -1, self.hidden_size]
        )

        init_c = paddle.reshape(
            init_cell, shape=[self.num_layers, -1, self.hidden_size]
        )

        # the NPU 'top_k' kernel only supports `int32`, so cast `input` from `int64` to `int32`
        input = fluid.layers.cast(input, "int32")
        x_emb = self.embedding(input)
        x_emb = paddle.reshape(
            x_emb, shape=[-1, self.num_steps, self.hidden_size]
        )
        if self.dropout is not None and self.dropout > 0.0:
            x_emb = fluid.layers.dropout(
                x_emb,
                dropout_prob=self.dropout,
                dropout_implementation='upscale_in_train',
            )
        rnn_out, last_hidden, last_cell = self.simple_lstm_rnn(
            x_emb, init_h, init_c
        )

        rnn_out = paddle.reshape(
            rnn_out, shape=[-1, self.num_steps, self.hidden_size]
        )
        projection = fluid.layers.matmul(rnn_out, self.softmax_weight)
        projection = fluid.layers.elementwise_add(projection, self.softmax_bias)
        projection = paddle.reshape(projection, shape=[-1, self.vocab_size])
        loss = fluid.layers.softmax_with_cross_entropy(
            logits=projection, label=label, soft_label=False
        )
        loss = paddle.reshape(loss, shape=[-1, self.num_steps])
        loss = fluid.layers.reduce_mean(loss, dim=[0])
        loss = paddle.sum(loss)

        return loss, last_hidden, last_cell


class TestSaveLoadBase(unittest.TestCase):
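    # Round-trip check for fluid.save / fluid.load: train a few steps,
    # snapshot every persistable variable, zero them all, reload, and
    # verify the restored values match the snapshot exactly.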
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )
            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))
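            # fluid.save writes three files next to the given prefix:
            # test_1.pdparams (parameters), test_1.pdopt (optimizer state)
            # and test_1.pdmodel (the serialized program)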

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program,
                os.path.join(temp_dir.name, "test_1.pdparams"),
                exe,
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestSaveLoadPartial(unittest.TestCase):
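    # Same round trip, but loads into a test program cloned before the
    # optimizer ops were added, so only a subset of the saved variables
    # is restored.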
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )
            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                test_program, os.path.join(temp_dir.name, "test_1.pdopt"), None
            )

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            fluid.load(
                test_program,
                os.path.join(temp_dir.name, "test_1.pdmodel"),
                None,
            )
            temp_dir.cleanup()


class TestSaveLoadSetStateDict(unittest.TestCase):
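    # Round trip through fluid.save / fluid.load using the bare path
    # prefix, so load picks up the parameter and optimizer files itself.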
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )
            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(main_program, os.path.join(temp_dir.name, "test_1"), exe)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestProgramStatePartial(unittest.TestCase):
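    # Exercises fluid.load_program_state / fluid.set_program_state, loading
    # the state from the bare prefix and from each concrete file
    # (.pdparams, .pdopt, .pdmodel) in turn.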
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )
            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, 'test_1'))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # fluid.load(test_program, "./test_1", None )
            program_state = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1')
            )

            program_state_1 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdparams')
            )

            program_state_2 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdopt')
            )

            program_state_3 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdmodel')
            )

            fluid.set_program_state(test_program, program_state)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 1
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_1)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 2
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_2)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 3
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_3)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestVariableInit(unittest.TestCase):
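    # Rebuilds saved variables by hand: creates them in a fresh scope via
    # core._create_loaded_parameter, fills them from the pickled .pdparams
    # and .pdopt files, then compares against the live scope.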
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_variable_init(self):

        x = fluid.data(name="x", shape=[10, 10], dtype='float32')
        y = fluid.layers.fc(x, 10)
        z = fluid.layers.fc(y, 10)

        place = self.set_place()
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())

        temp_dir = tempfile.TemporaryDirectory()
        fluid.save(
            fluid.default_main_program(),
            os.path.join(temp_dir.name, "test_path"),
        )

        def set_var(var, ndarray):
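            # copy an ndarray into the variable's tensor on whichever place
            # (CPU, CUDA-pinned or CUDA) the tensor currently lives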
            t = var.get_tensor()
            p = t._place()
            if p.is_cpu_place():
                place = paddle.fluid.CPUPlace()
            elif p.is_cuda_pinned_place():
                place = paddle.fluid.CUDAPinnedPlace()
            else:
                p = paddle.fluid.core.Place()
                p.set_place(t._place())
                place = paddle.fluid.CUDAPlace(p.gpu_device_id())

            t.set(ndarray, place)

        program = fluid.default_main_program()
        new_scope = fluid.core.Scope()

        place = self.set_place()
        exe = fluid.Executor(place)
        parameter_list = list(
            filter(fluid.io.is_parameter, program.list_vars())
        )
        fluid.core._create_loaded_parameter(
            parameter_list, new_scope, exe._default_executor
        )
        parameter_file_name = os.path.join(temp_dir.name, "test_path.pdparams")
        with open(parameter_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in parameter_list:
            assert (
                v.name in load_dict
            ), "Can not find [{}] in model file [{}]".format(
                v.name, parameter_file_name
            )
            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        opt_list = list(
            filter(fluid.io.is_belong_to_optimizer, program.list_vars())
        )
        fluid.core._create_loaded_parameter(
            opt_list, new_scope, exe._default_executor
        )
        opt_file_name = os.path.join(temp_dir.name, "test_path.pdopt")
        with open(opt_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in opt_list:
            assert (
                v.name in load_dict
            ), "Can not find [{}] in model file [{}]".format(
                v.name, opt_file_name
            )

            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        base_map = {}
        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                t = np.array(
                    fluid.global_scope().find_var(var.name).get_tensor()
                )
                # make sure all the parameter and optimizer variables have been updated
                base_map[var.name] = t

        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(new_scope.find_var(var.name).get_tensor())
                base_t = base_map[var.name]

                np.testing.assert_array_equal(new_t, base_t)
        temp_dir.cleanup()


class TestLoadFromOldInterface(unittest.TestCase):
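    # Saves checkpoints with the older save_persistables interface and
    # verifies fluid.load reads them back, including the shape-mismatch
    # error path and loading only a subset of variables via var_list.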
    def setUp(self):
        if os.path.exists("test_path.pdparams"):
            os.remove("test_path.pdparams")

        if os.path.exists("test_static_load_var_list.pdparams"):
            os.remove("test_static_load_var_list.pdparams")

        self.temp_dir = tempfile.TemporaryDirectory()

    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )
            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe, os.path.join(self.temp_dir.name, "test_path"), main_program
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program, os.path.join(self.temp_dir.name, "test_path"), exe
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)
            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    os.path.join(self.temp_dir.name, "test_path"),
                    exe,
                )

            # check loading into a program that contains unused parameters

            fluid.load(
                test_clone_program,
                os.path.join(self.temp_dir.name, "test_path"),
                exe,
            )

    def test_load_from_old_interface_var_list(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )
            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                main_program,
            )
            # set var to zero
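            # keep every other persistable variable in var_list; only these
            # are passed to fluid.load below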
            var_list = []
            for i, var in enumerate(main_program.list_vars()):
                if isinstance(var, framework.Parameter) or var.persistable:
                    if i % 2 == 0:
                        var_list.append(var)
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                exe,
                var_list,
            )
            var_list_names = [var.name for var in var_list]
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    if var.name in var_list_names:
                        # loaded vars
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)
                    else:
                        # not loaded vars
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)


class TestLoadFromOldInterfaceSingleFile(unittest.TestCase):
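    # Same old-interface round trip, but with every persistable packed
    # into one file via save_persistables(..., filename="model_single").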
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )
            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables
                    # have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            save_dir = os.path.join(temp_dir.name, "test_path")
            # fluid.save(main_program, "./test_1")
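            # the legacy interface writes all persistable variables into a
            # single combined file named "model_single" under save_dir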
            paddle.distributed.io.save_persistables(
                exe, save_dir, main_program, filename="model_single"
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables
                    # have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            file_model_path = os.path.join(save_dir, "model_single")
            fluid.load(
                main_program,
                file_model_path,
                exe,
                fluid.io.get_program_persistable_vars(main_program),
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # test exception
            # change shape
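            # fluid.load validates each variable's shape against the saved
            # tensor, so inflating every shape should raise RuntimeError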
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)

            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            fluid.io.save_params(
                exe, "test_path", main_program, filename="model_single"
            )
            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # check when executor is None
            with self.assertRaises(ValueError):
                fluid.load(
                    main_program,
                    file_model_path,
                    None,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # check when var list is None
            with self.assertRaises(ValueError):
                fluid.load(main_program, file_model_path, exe, None)

            # check loading a var_list that contains a variable
            # missing from the saved file
            with self.assertRaises(RuntimeError):
                temp_var = framework.Variable(
                    main_program.global_block(), shape=[1], name="test_temp_var"
                )
                all_var_list = list(main_program.list_vars())
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    all_var_list + [temp_var],
                )
        temp_dir.cleanup()


class TestProgramStateOldSave(unittest.TestCase):
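    # Exercises fluid.load_program_state / fluid.set_program_state against a
    # checkpoint written by the legacy save_persistables interface, in both
    # static and dynamic graph mode.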
    def setUp(self):
        self.test_dygraph = True
        self.temp_dir = tempfile.TemporaryDirectory()

    def tearDown(self):
        self.temp_dir.cleanup()

    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )
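            # the extra fc layer adds one more persistable parameter for the
            # save/load round trip to cover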

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables
                    # have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            save_dir = os.path.join(self.temp_dir.name, "test_program_1")
            paddle.distributed.io.save_persistables(exe, save_dir, main_program)

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables
                    # have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # case 1: load basic
            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 2: load when an unneeded extra file is present alongside
            # the checkpoint
            def symlink_force(target, link_name):
                try:
                    self.create_symlink(target, link_name)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        os.remove(link_name)
                        self.create_symlink(target, link_name)
                    else:
                        raise e

            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 3: load with var_list
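            # passing a var_list restricts load_program_state to the listed
            # variables (here, all trainable parameters)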
            program_state = fluid.load_program_state(
                save_dir, main_program.all_parameters()
            )
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

        if self.test_dygraph:
            # make sure `load_program_state` can be used in dynamic graph mode
            with fluid.dygraph.guard(place):
                load_state = fluid.load_program_state(save_dir)
                for k, v in load_state.items():
                    np.testing.assert_array_equal(base_map[k], v)

    def create_symlink(self, target, link_name):
        try:
            os.symlink(target, link_name)
        except AttributeError:
            import ctypes

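            # os.symlink may be unavailable (older Python on Windows); fall
            # back to the Win32 CreateSymbolicLinkA API through ctypes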
            kernel_dll = ctypes.windll.LoadLibrary("kernel32.dll")
            kernel_dll.CreateSymbolicLinkA(target, link_name, 0)

    def check_in_static(self, main_program, base_map):
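        # every parameter and persistable variable in the global scope must
        # match the snapshot taken before saving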
        for var in main_program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(
                    fluid.global_scope().find_var(var.name).get_tensor()
                )
                base_t = base_map[var.name]
                np.testing.assert_array_equal(new_t, base_t)


class TestProgramStateOldSaveSingleModel(unittest.TestCase):
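    # Same flow as TestProgramStateOldSave, but the checkpoint is written as
    # a single combined file, which load_program_state can only read when an
    # explicit var_list is supplied.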
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables
                    # have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            save_dir = os.path.join(temp_dir.name, "test_program_2")
            paddle.distributed.io.save_persistables(
                exe, save_dir, main_program, filename="model_1"
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables
                    # have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # fluid.load(test_program, "./test_1", None )
            program_state = fluid.load_program_state(
                os.path.join(save_dir, "model_1"),
                var_list=fluid.io.get_program_persistable_vars(main_program),
            )
            fluid.set_program_state(main_program, program_state)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            with self.assertRaises(ValueError):
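                # a combined single-file checkpoint cannot be loaded without
                # an explicit var_list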
                fluid.load_program_state(os.path.join(save_dir, "model_1"))

            with self.assertRaises(TypeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"), var_list=["str"]
                )

            with self.assertRaises(RuntimeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"),
                    var_list=[
                        main_program.global_block().create_var(
                            name="fake_var_name", persistable=True
                        )
                    ],
                )
        temp_dir.cleanup()


class TestStaticSaveLoadPickle(unittest.TestCase):
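    # Checks that paddle.fluid.save validates its pickle-protocol argument
    # and that checkpoints round-trip correctly for protocols 2, 3 and 4.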
    def test_pickle_protocol(self):
        # enable static mode
        paddle.enable_static()

        with new_program_scope():
            # create network
            x = paddle.static.data(
                name="static_save_load_large_x",
                shape=[None, 10],
                dtype='float32',
            )
            z = paddle.static.nn.fc(x, 10, bias_attr=False)
            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            exe.run(paddle.static.default_startup_program())
            prog = paddle.static.default_main_program()

            base_map = {}
            for var in prog.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables
                    # have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            temp_dir = tempfile.TemporaryDirectory()
            path = os.path.join(
                temp_dir.name, "test_static_save_load_pickle", "pickle_protocol"
            )

            with self.assertRaises(ValueError):
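                # only integer pickle protocols in [2, 4] are accepted;
                # 2.0, 1 and 5 below must all be rejected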
                paddle.fluid.save(prog, path, 2.0)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 1)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 5)

            protocols = [2, 3, 4]
            for protocol in protocols:
                paddle.fluid.save(prog, path, protocol)
                # set var to zero
                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        ten = (
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        ten.set(np.zeros_like(np.array(ten)), place)

                        new_t = np.array(
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)

                paddle.fluid.load(prog, path)

                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        new_t = np.array(
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)


class TestSaveLoadInferenceModel(unittest.TestCase):
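    # save_inference_model / load_inference_model round trip for a program
    # that contains no parameters at all.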
    def setUp(self):
        self.temp_dir = tempfile.TemporaryDirectory()
        self.model_path = os.path.join(self.temp_dir.name, 'no_params')

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_no_params(self):
        main_program = framework.Program()
        with framework.program_guard(main_program):
            x = paddle.static.data(name="x", shape=[10, 10], dtype='float32')
            y = x + x

            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)

            paddle.static.save_inference_model(self.model_path, [x], [y], exe)

            [
                inference_program,
                feed_target_names,
                fetch_targets,
            ] = paddle.static.load_inference_model(self.model_path, exe)

            self.assertEqual(feed_target_names, ['x'])
            self.assertEqual(fetch_targets[0].shape, (10, 10))
            ops = [op.type for op in inference_program.block(0).ops]
            self.assertEqual(ops, ['feed', 'elementwise_add', 'scale', 'fetch'])


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()