#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
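
# These tests exercise the static-graph save/load APIs (`fluid.save`,
# `fluid.load`, `fluid.load_program_state`/`fluid.set_program_state`, and the
# older `save_persistables` interface) on a small PTB-style LSTM model.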

import sys

import unittest
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.fluid.framework as framework
from paddle.fluid.optimizer import Adam
from test_imperative_base import new_program_scope
import numpy as np
import pickle
import os
import errno
import tempfile

paddle.enable_static()


class SimpleLSTMRNN(fluid.Layer):
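    # A multi-layer LSTM built from basic fluid ops, manually unrolled over
    # `num_steps` time steps.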
    def __init__(
        self,
        name_scope,
        hidden_size,
        num_steps,
        num_layers=2,
        init_scale=0.1,
        dropout=None,
    ):
        super().__init__()
        self._hidden_size = hidden_size
        self._num_layers = num_layers
        self._init_scale = init_scale
        self._dropout = dropout
        self._input = None
        self._num_steps = num_steps
        self.cell_array = []
        self.hidden_array = []

        self.weight_1_arr = []
        self.weight_2_arr = []
        self.bias_arr = []
        self.mask_array = []

        for i in range(self._num_layers):
            weight_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale
                    )
                ),
                shape=[self._hidden_size * 2, self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.UniformInitializer(
                    low=-self._init_scale, high=self._init_scale
                ),
            )
            self.weight_1_arr.append(self.add_parameter('w_%d' % i, weight_1))
            bias_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale
                    )
                ),
                shape=[self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.Constant(0.0),
            )
            self.bias_arr.append(self.add_parameter('b_%d' % i, bias_1))

    def forward(self, input_embedding, init_hidden=None, init_cell=None):
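        # Runs the unrolled LSTM over `input_embedding` and returns the
        # per-step outputs plus the final hidden and cell states of all layers.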
        self.cell_array = []
        self.hidden_array = []

        for i in range(self._num_layers):
            pre_hidden = fluid.layers.slice(
                init_hidden, axes=[0], starts=[i], ends=[i + 1]
            )
            pre_cell = fluid.layers.slice(
                init_cell, axes=[0], starts=[i], ends=[i + 1]
            )
            pre_hidden = fluid.layers.reshape(
                pre_hidden, shape=[-1, self._hidden_size]
            )
            pre_cell = fluid.layers.reshape(
                pre_cell, shape=[-1, self._hidden_size]
            )
            self.hidden_array.append(pre_hidden)
            self.cell_array.append(pre_cell)

        res = []
        for index in range(self._num_steps):
            self._input = fluid.layers.slice(
                input_embedding, axes=[1], starts=[index], ends=[index + 1]
            )
            self._input = fluid.layers.reshape(
                self._input, shape=[-1, self._hidden_size]
            )
            for k in range(self._num_layers):
                pre_hidden = self.hidden_array[k]
                pre_cell = self.cell_array[k]
                weight_1 = self.weight_1_arr[k]
                bias = self.bias_arr[k]

                nn = fluid.layers.concat([self._input, pre_hidden], 1)
                gate_input = fluid.layers.matmul(x=nn, y=weight_1)

                gate_input = fluid.layers.elementwise_add(gate_input, bias)
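                # split the fused gate pre-activations into the input (i),
                # cell candidate (j), forget (f) and output (o) components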
                i, j, f, o = fluid.layers.split(
                    gate_input, num_or_sections=4, dim=-1
                )
                c = pre_cell * fluid.layers.sigmoid(f) + fluid.layers.sigmoid(
                    i
                ) * fluid.layers.tanh(j)
                m = fluid.layers.tanh(c) * fluid.layers.sigmoid(o)
                self.hidden_array[k] = m
                self.cell_array[k] = c
                self._input = m

                if self._dropout is not None and self._dropout > 0.0:
                    self._input = fluid.layers.dropout(
                        self._input,
                        dropout_prob=self._dropout,
                        dropout_implementation='upscale_in_train',
                    )
            res.append(
                fluid.layers.reshape(
                    self._input, shape=[1, -1, self._hidden_size]
                )
            )
        real_res = fluid.layers.concat(res, 0)
        real_res = fluid.layers.transpose(x=real_res, perm=[1, 0, 2])
        last_hidden = fluid.layers.concat(self.hidden_array, 1)
        last_hidden = fluid.layers.reshape(
            last_hidden, shape=[-1, self._num_layers, self._hidden_size]
        )
        last_hidden = fluid.layers.transpose(x=last_hidden, perm=[1, 0, 2])
        last_cell = fluid.layers.concat(self.cell_array, 1)
        last_cell = fluid.layers.reshape(
            last_cell, shape=[-1, self._num_layers, self._hidden_size]
        )
        last_cell = fluid.layers.transpose(x=last_cell, perm=[1, 0, 2])
        return real_res, last_hidden, last_cell


class PtbModel(fluid.Layer):
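    # A PTB-style language model: word embedding, stacked LSTM, and a softmax
    # projection trained with a cross-entropy loss.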
    def __init__(
        self,
        name_scope,
        hidden_size,
        vocab_size,
        num_layers=2,
        num_steps=20,
        init_scale=0.1,
        dropout=None,
    ):
        super().__init__()
        self.hidden_size = hidden_size
        self.vocab_size = vocab_size
        self.init_scale = init_scale
        self.num_layers = num_layers
        self.num_steps = num_steps
        self.dropout = dropout
        self.simple_lstm_rnn = SimpleLSTMRNN(
            self.full_name(),
            hidden_size,
            num_steps,
            num_layers=num_layers,
            init_scale=init_scale,
            dropout=dropout,
        )
        self.embedding = paddle.nn.Embedding(
            num_embeddings=vocab_size,
            embedding_dim=hidden_size,
            weight_attr=fluid.ParamAttr(
                name='embedding_para',
                initializer=fluid.initializer.UniformInitializer(
                    low=-init_scale, high=init_scale
                ),
            ),
        )
        self.softmax_weight = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.hidden_size, self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale
            ),
        )
        self.softmax_bias = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale
            ),
        )

    def forward(self, input, label, init_hidden, init_cell):
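        # Embeds `input`, runs the LSTM from the given initial states,
        # projects onto the vocabulary, and returns
        # (loss, last_hidden, last_cell).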
        init_h = fluid.layers.reshape(
            init_hidden, shape=[self.num_layers, -1, self.hidden_size]
        )

        init_c = fluid.layers.reshape(
            init_cell, shape=[self.num_layers, -1, self.hidden_size]
        )

        # The NPU 'top_k' kernel only supports the `int32` dtype, so cast `input` from `int64` to `int32`.
        input = fluid.layers.cast(input, "int32")
        x_emb = self.embedding(input)
        x_emb = fluid.layers.reshape(
            x_emb, shape=[-1, self.num_steps, self.hidden_size]
        )
        if self.dropout is not None and self.dropout > 0.0:
            x_emb = fluid.layers.dropout(
                x_emb,
                dropout_prob=self.dropout,
                dropout_implementation='upscale_in_train',
            )
        rnn_out, last_hidden, last_cell = self.simple_lstm_rnn(
            x_emb, init_h, init_c
        )

        rnn_out = fluid.layers.reshape(
            rnn_out, shape=[-1, self.num_steps, self.hidden_size]
        )
        projection = fluid.layers.matmul(rnn_out, self.softmax_weight)
        projection = fluid.layers.elementwise_add(projection, self.softmax_bias)
        projection = fluid.layers.reshape(
            projection, shape=[-1, self.vocab_size]
        )
        loss = fluid.layers.softmax_with_cross_entropy(
            logits=projection, label=label, soft_label=False
        )
        loss = fluid.layers.reshape(loss, shape=[-1, self.num_steps])
        loss = fluid.layers.reduce_mean(loss, dim=[0])
        loss = fluid.layers.reduce_sum(loss)

        return loss, last_hidden, last_cell


class TestSaveLoadBase(unittest.TestCase):
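    # Round-trip test for `fluid.save`/`fluid.load`: train a few batches,
    # record every persistable variable, zero them all out, reload from the
    # checkpoint, and check that the values are restored exactly.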
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program,
                os.path.join(temp_dir.name, "test_1.pdparams"),
                exe,
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestSaveLoadPartial(unittest.TestCase):
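    # Same round-trip as TestSaveLoadBase, but restores into a `for_test`
    # clone of the program that lacks the optimizer variables, i.e. a
    # partial load.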
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                test_program, os.path.join(temp_dir.name, "test_1.pdopt"), None
            )

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            fluid.load(
                test_program,
                os.path.join(temp_dir.name, "test_1.pdmodel"),
                None,
            )
            temp_dir.cleanup()


class TestSaveLoadSetStateDict(unittest.TestCase):
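    # Round-trip test that passes the bare checkpoint prefix (rather than an
    # explicit `.pdparams` path) to `fluid.load`.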
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(main_program, os.path.join(temp_dir.name, "test_1"), exe)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestProgramStatePartial(unittest.TestCase):
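    # Tests `fluid.load_program_state`/`fluid.set_program_state`, loading the
    # saved state via the bare prefix as well as the `.pdparams`, `.pdopt`
    # and `.pdmodel` paths.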
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
712 713 714 715 716
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, 'test_1'))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # fluid.load(test_program, "./test_1", None )
            program_state = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1')
            )

            program_state_1 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdparams')
            )

            program_state_2 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdopt')
            )

            program_state_3 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdmodel')
            )

            fluid.set_program_state(test_program, program_state)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 1
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_1)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 2
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_2)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 3
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_3)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestVariableInit(unittest.TestCase):
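    # Re-creates saved variables by hand in a fresh scope with
    # `_create_loaded_parameter`, fills them from the pickled `.pdparams` and
    # `.pdopt` files, and compares them against the values in the global scope.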
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_variable_init(self):

        x = fluid.data(name="x", shape=[10, 10], dtype='float32')
        y = fluid.layers.fc(x, 10)
        z = fluid.layers.fc(y, 10)

        place = self.set_place()
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())

        temp_dir = tempfile.TemporaryDirectory()
        fluid.save(
            fluid.default_main_program(),
            os.path.join(temp_dir.name, "test_path"),
        )

        def set_var(var, ndarray):
            t = var.get_tensor()
            p = t._place()
            if p.is_cpu_place():
                place = paddle.fluid.CPUPlace()
            elif p.is_cuda_pinned_place():
                place = paddle.fluid.CUDAPinnedPlace()
            else:
                p = paddle.fluid.core.Place()
                p.set_place(t._place())
                place = paddle.fluid.CUDAPlace(p.gpu_device_id())

            t.set(ndarray, place)

        program = fluid.default_main_program()
        new_scope = fluid.core.Scope()

        place = self.set_place()
        exe = fluid.Executor(place)
        parameter_list = list(
            filter(fluid.io.is_parameter, program.list_vars())
        )

        fluid.core._create_loaded_parameter(
            parameter_list, new_scope, exe._default_executor
        )
        parameter_file_name = os.path.join(temp_dir.name, "test_path.pdparams")
        with open(parameter_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in parameter_list:
            assert (
                v.name in load_dict
            ), "Can not find [{}] in model file [{}]".format(
                v.name, parameter_file_name
            )
            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        opt_list = list(
            filter(fluid.io.is_belong_to_optimizer, program.list_vars())
        )

        fluid.core._create_loaded_parameter(
            opt_list, new_scope, exe._default_executor
        )
        opt_file_name = os.path.join(temp_dir.name, "test_path.pdopt")
        with open(opt_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in opt_list:
            assert (
                v.name in load_dict
            ), "Can not find [{}] in model file [{}]".format(
                v.name, opt_file_name
            )

            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        base_map = {}
        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                t = np.array(
                    fluid.global_scope().find_var(var.name).get_tensor()
                )
                # make sure all the parameter and optimizer vars have been updated
                base_map[var.name] = t

        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(new_scope.find_var(var.name).get_tensor())
                base_t = base_map[var.name]

                np.testing.assert_array_equal(new_t, base_t)
        temp_dir.cleanup()


class TestLoadFromOldInterface(unittest.TestCase):
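    # Saves with the older `save_persistables` interface and restores with
    # `fluid.load`, covering a shape-mismatch error case and a partial load
    # through an explicit `var_list`.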
    def setUp(self):
        if os.path.exists("test_path.pdparams"):
            os.remove("test_path.pdparams")

        if os.path.exists("test_static_load_var_list.pdparams"):
            os.remove("test_static_load_var_list.pdparams")

        self.temp_dir = tempfile.TemporaryDirectory()

    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe, os.path.join(self.temp_dir.name, "test_path"), main_program
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program, os.path.join(self.temp_dir.name, "test_path"), exe
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)
            with self.assertRaises(RuntimeError):
1100 1101 1102 1103 1104
                fluid.load(
                    main_program,
                    os.path.join(self.temp_dir.name, "test_path"),
                    exe,
                )

            # check loading into a program clone that does not contain all saved variables (unused-parameter case)

            fluid.load(
                test_clone_program,
                os.path.join(self.temp_dir.name, "test_path"),
                exe,
            )

    def test_load_from_old_interface_var_list(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                main_program,
            )

            # set var to zero
            var_list = []
            for i, var in enumerate(main_program.list_vars()):
                if isinstance(var, framework.Parameter) or var.persistable:
                    if i % 2 == 0:
                        var_list.append(var)
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                exe,
                var_list,
            )
            var_list_names = [var.name for var in var_list]
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    if var.name in var_list_names:
                        # loaded vars
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)
                    else:
                        # not loaded vars
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)


class TestLoadFromOldInterfaceSingleFile(unittest.TestCase):
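    # Like TestLoadFromOldInterface, but saves every persistable variable
    # into one combined file (`filename="model_single"`) and loads it back
    # with an explicit variable list.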
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
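            # save all persistables into a single file ("model_single") via the legacy interface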
            save_dir = os.path.join(temp_dir.name, "test_path")
            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe, save_dir, main_program, filename="model_single"
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

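            # reload from the single saved file and verify every var is restored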
            file_model_path = os.path.join(save_dir, "model_single")
            fluid.load(
                main_program,
                file_model_path,
                exe,
                fluid.io.get_program_persistable_vars(main_program),
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # test exception
            # change shape
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)

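            # a shape mismatch between the program and the saved file must raise RuntimeError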
            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    fluid.io.get_program_persistable_vars(main_program),
                )

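            # overwrite the file with parameters only; loading all persistable vars must then fail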
            fluid.io.save_params(
                exe, save_dir, main_program, filename="model_single"
            )
            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # check when executor is None
            with self.assertRaises(ValueError):
                fluid.load(
                    main_program,
                    file_model_path,
                    None,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # check when var list is None
            with self.assertRaises(ValueError):
                fluid.load(main_program, file_model_path, exe, None)

            # check save params, load var_list = get_program_persistable_vars
            with self.assertRaises(RuntimeError):
                temp_var = framework.Variable(
                    main_program.global_block(), shape=[1], name="test_temp_var"
                )
                all_var_list = list(main_program.list_vars())
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    all_var_list + [temp_var],
                )
        temp_dir.cleanup()


class TestProgramStateOldSave(unittest.TestCase):
    def setUp(self):
        self.test_dygraph = True
        self.temp_dir = tempfile.TemporaryDirectory()

    def tearDown(self):
        self.temp_dir.cleanup()

    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

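            # add an extra fc layer after cloning, so main_program holds vars that test_program lacks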
            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
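            # save persistables with the old interface (one file per variable)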
            save_dir = os.path.join(self.temp_dir.name, "test_program_1")
            paddle.distributed.io.save_persistables(exe, save_dir, main_program)

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # case 1: load basic
            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 2: load when extra, unneeded files are present
            def symlink_force(target, link_name):
                try:
                    self.create_symlink(target, link_name)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        os.remove(link_name)
                        self.create_symlink(target, link_name)
                    else:
                        raise e

            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 3: load with var_list
            program_state = fluid.load_program_state(
                save_dir, main_program.all_parameters()
            )
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

        if self.test_dygraph:
            # make sure `load_program_state` can be used in dynamic graph mode
            with fluid.dygraph.guard(place):
                load_state = fluid.load_program_state(save_dir)
                for k, v in load_state.items():
                    np.testing.assert_array_equal(base_map[k], v)

    def create_symlink(self, target, link_name):
        try:
            os.symlink(target, link_name)
        except AttributeError:
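            # os.symlink may be missing on Windows Python builds; fall back to the Win32 API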
            import ctypes

            kernel_dll = ctypes.windll.LoadLibrary("kernel32.dll")
            kernel_dll.CreateSymbolicLinkA(target, link_name, 0)

    def check_in_static(self, main_program, base_map):
        for var in main_program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(
                    fluid.global_scope().find_var(var.name).get_tensor()
                )
                base_t = base_map[var.name]
                np.testing.assert_array_equal(new_t, base_t)


class TestProgramStateOldSaveSingleModel(unittest.TestCase):
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

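            # add an extra fc layer after cloning, so main_program holds vars that test_program lacks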
            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            save_dir = os.path.join(temp_dir.name, "test_program_2")
            paddle.distributed.io.save_persistables(
                exe, save_dir, main_program, filename="model_1"
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # fluid.load(test_program, "./test_1", None )
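            # load from the single saved file, restricted to the program's persistable vars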
            program_state = fluid.load_program_state(
                os.path.join(save_dir, "model_1"),
                var_list=fluid.io.get_program_persistable_vars(main_program),
            )
            fluid.set_program_state(main_program, program_state)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            with self.assertRaises(ValueError):
                fluid.load_program_state(os.path.join(save_dir, "model_1"))

            with self.assertRaises(TypeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"), var_list=["str"]
                )

            with self.assertRaises(RuntimeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"),
                    var_list=[
                        main_program.global_block().create_var(
                            name="fake_var_name", persistable=True
                        )
                    ],
                )
        temp_dir.cleanup()


class TestStaticSaveLoadPickle(unittest.TestCase):
    def test_pickle_protocol(self):
        # enable static mode
        paddle.enable_static()

        with new_program_scope():
            # create network
            x = paddle.static.data(
                name="static_save_load_large_x",
                shape=[None, 10],
                dtype='float32',
            )
            z = paddle.static.nn.fc(x, 10, bias_attr=False)
            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            exe.run(paddle.static.default_startup_program())
            prog = paddle.static.default_main_program()

            base_map = {}
            for var in prog.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            temp_dir = tempfile.TemporaryDirectory()
            path = os.path.join(
                temp_dir.name, "test_static_save_load_pickle", "pickle_protocol"
            )

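            # the pickle protocol must be an integer in [2, 4]; anything else raises ValueError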
            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 2.0)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 1)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 5)

            protocols = [
                2,
            ]
            if sys.version_info >= (3, 4):
                protocols += [3, 4]
            for protocol in protocols:
                paddle.fluid.save(prog, path, protocol)
                # set var to zero
                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        ten = (
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        ten.set(np.zeros_like(np.array(ten)), place)

                        new_t = np.array(
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)

                paddle.fluid.load(prog, path)

                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        new_t = np.array(
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)


class TestSaveLoadInferenceModel(unittest.TestCase):
    def setUp(self):
        self.temp_dir = tempfile.TemporaryDirectory()
        self.model_path = os.path.join(self.temp_dir.name, 'no_params')

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_no_params(self):
        main_program = framework.Program()
        with framework.program_guard(main_program):
            x = paddle.static.data(name="x", shape=[10, 10], dtype='float32')
            y = x + x

            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)

            paddle.static.save_inference_model(self.model_path, [x], [y], exe)

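            # reload the exported model and check the feeds, fetches, and op sequence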
            [
                inference_program,
                feed_target_names,
                fetch_targets,
            ] = paddle.static.load_inference_model(self.model_path, exe)

            self.assertEqual(feed_target_names, ['x'])
            self.assertEqual(fetch_targets[0].shape, (10, 10))
            ops = [op.type for op in inference_program.block(0).ops]
            self.assertEqual(ops, ['feed', 'elementwise_add', 'scale', 'fetch'])


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()