#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

import unittest
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.fluid.framework as framework
from paddle.fluid.optimizer import Adam
from test_imperative_base import new_program_scope
import numpy as np
import pickle
import os
import errno
import tempfile

paddle.enable_static()
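
# These tests exercise static-graph checkpointing: ``fluid.save`` /
# ``fluid.load``, ``load_program_state`` / ``set_program_state``, and the
# older per-variable and single-file persistable interfaces, using a small
# PTB-style LSTM language model as the workload. A minimal round trip of the
# core API looks like this (paths illustrative):
#
#     fluid.save(main_program, "ckpt/model")       # -> model.pdparams/.pdopt/.pdmodel
#     fluid.load(main_program, "ckpt/model", exe)  # restore values in place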


class SimpleLSTMRNN(fluid.Layer):
    def __init__(
        self,
        name_scope,
        hidden_size,
        num_steps,
        num_layers=2,
        init_scale=0.1,
        dropout=None,
    ):
        super().__init__()
        self._hidden_size = hidden_size
        self._num_layers = num_layers
        self._init_scale = init_scale
        self._dropout = dropout
        self._input = None
        self._num_steps = num_steps
        self.cell_array = []
        self.hidden_array = []

        self.weight_1_arr = []
        self.weight_2_arr = []
        self.bias_arr = []
        self.mask_array = []

        for i in range(self._num_layers):
            weight_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale
                    )
                ),
                shape=[self._hidden_size * 2, self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.UniformInitializer(
                    low=-self._init_scale, high=self._init_scale
                ),
            )
            self.weight_1_arr.append(self.add_parameter('w_%d' % i, weight_1))
            bias_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale
                    )
                ),
                shape=[self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.Constant(0.0),
            )
            self.bias_arr.append(self.add_parameter('b_%d' % i, bias_1))

    def forward(self, input_embedding, init_hidden=None, init_cell=None):
        self.cell_array = []
        self.hidden_array = []

        for i in range(self._num_layers):
            pre_hidden = fluid.layers.slice(
                init_hidden, axes=[0], starts=[i], ends=[i + 1]
            )
            pre_cell = fluid.layers.slice(
                init_cell, axes=[0], starts=[i], ends=[i + 1]
            )
            pre_hidden = paddle.reshape(
                pre_hidden, shape=[-1, self._hidden_size]
            )
            pre_cell = paddle.reshape(pre_cell, shape=[-1, self._hidden_size])
            self.hidden_array.append(pre_hidden)
            self.cell_array.append(pre_cell)

        res = []
        for index in range(self._num_steps):
            self._input = fluid.layers.slice(
                input_embedding, axes=[1], starts=[index], ends=[index + 1]
            )
            self._input = paddle.reshape(
                self._input, shape=[-1, self._hidden_size]
            )
            for k in range(self._num_layers):
                pre_hidden = self.hidden_array[k]
                pre_cell = self.cell_array[k]
                weight_1 = self.weight_1_arr[k]
                bias = self.bias_arr[k]

                nn = fluid.layers.concat([self._input, pre_hidden], 1)
                gate_input = fluid.layers.matmul(x=nn, y=weight_1)

                gate_input = fluid.layers.elementwise_add(gate_input, bias)
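                # ``gate_input`` packs all four LSTM gates; split it into the
                # input (i), candidate (j), forget (f) and output (o) slices
                # and apply the standard cell update:
                #   c_t = sigmoid(f) * c_{t-1} + sigmoid(i) * tanh(j)
                #   m_t = sigmoid(o) * tanh(c_t)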
                i, j, f, o = fluid.layers.split(
                    gate_input, num_or_sections=4, dim=-1
                )
                c = pre_cell * paddle.nn.functional.sigmoid(
                    f
                ) + paddle.nn.functional.sigmoid(i) * paddle.tanh(j)
                m = paddle.tanh(c) * paddle.nn.functional.sigmoid(o)
                self.hidden_array[k] = m
                self.cell_array[k] = c
                self._input = m

                if self._dropout is not None and self._dropout > 0.0:
                    self._input = fluid.layers.dropout(
                        self._input,
                        dropout_prob=self._dropout,
                        dropout_implementation='upscale_in_train',
                    )
            res.append(
                paddle.reshape(self._input, shape=[1, -1, self._hidden_size])
            )
        real_res = fluid.layers.concat(res, 0)
        real_res = fluid.layers.transpose(x=real_res, perm=[1, 0, 2])
        last_hidden = fluid.layers.concat(self.hidden_array, 1)
        last_hidden = paddle.reshape(
            last_hidden, shape=[-1, self._num_layers, self._hidden_size]
        )
        last_hidden = fluid.layers.transpose(x=last_hidden, perm=[1, 0, 2])
        last_cell = fluid.layers.concat(self.cell_array, 1)
        last_cell = paddle.reshape(
            last_cell, shape=[-1, self._num_layers, self._hidden_size]
        )
        last_cell = fluid.layers.transpose(x=last_cell, perm=[1, 0, 2])
        return real_res, last_hidden, last_cell


class PtbModel(fluid.Layer):
    def __init__(
        self,
        name_scope,
        hidden_size,
        vocab_size,
        num_layers=2,
        num_steps=20,
        init_scale=0.1,
        dropout=None,
    ):
        super().__init__()
        self.hidden_size = hidden_size
        self.vocab_size = vocab_size
        self.init_scale = init_scale
        self.num_layers = num_layers
        self.num_steps = num_steps
        self.dropout = dropout
        self.simple_lstm_rnn = SimpleLSTMRNN(
            self.full_name(),
            hidden_size,
            num_steps,
            num_layers=num_layers,
            init_scale=init_scale,
            dropout=dropout,
        )
        self.embedding = paddle.nn.Embedding(
            num_embeddings=vocab_size,
            embedding_dim=hidden_size,
            weight_attr=fluid.ParamAttr(
                name='embedding_para',
                initializer=fluid.initializer.UniformInitializer(
                    low=-init_scale, high=init_scale
                ),
            ),
        )
        self.softmax_weight = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.hidden_size, self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale
            ),
        )
        self.softmax_bias = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale
            ),
        )

    def forward(self, input, label, init_hidden, init_cell):
        init_h = paddle.reshape(
210 211
            init_hidden, shape=[self.num_layers, -1, self.hidden_size]
        )

        init_c = paddle.reshape(
            init_cell, shape=[self.num_layers, -1, self.hidden_size]
        )

        # The NPU 'top_k' kernel only supports `int32`, so cast `input` from `int64` to `int32`.
        input = fluid.layers.cast(input, "int32")
        x_emb = self.embedding(input)
        x_emb = paddle.reshape(
            x_emb, shape=[-1, self.num_steps, self.hidden_size]
        )
        if self.dropout is not None and self.dropout > 0.0:
            x_emb = fluid.layers.dropout(
                x_emb,
                dropout_prob=self.dropout,
                dropout_implementation='upscale_in_train',
            )
        rnn_out, last_hidden, last_cell = self.simple_lstm_rnn(
            x_emb, init_h, init_c
        )

        rnn_out = paddle.reshape(
            rnn_out, shape=[-1, self.num_steps, self.hidden_size]
        )
        projection = fluid.layers.matmul(rnn_out, self.softmax_weight)
        projection = fluid.layers.elementwise_add(projection, self.softmax_bias)
        projection = paddle.reshape(projection, shape=[-1, self.vocab_size])
        loss = fluid.layers.softmax_with_cross_entropy(
            logits=projection, label=label, soft_label=False
        )
        loss = paddle.reshape(loss, shape=[-1, self.num_steps])
        loss = fluid.layers.reduce_mean(loss, dim=[0])
        loss = fluid.layers.reduce_sum(loss)

        return loss, last_hidden, last_cell


class TestSaveLoadBase(unittest.TestCase):
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

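            # ``fluid.save`` writes three files for the given prefix:
            # ``test_1.pdparams`` (parameters), ``test_1.pdopt`` (optimizer
            # state) and ``test_1.pdmodel`` (the serialized program).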
            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

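            # ``fluid.load`` also accepts the suffixed ``.pdparams`` path and
            # restores both parameters and optimizer state from the matching
            # pair of files.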
            fluid.load(
                main_program,
                os.path.join(temp_dir.name, "test_1.pdparams"),
                exe,
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestSaveLoadPartial(unittest.TestCase):
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)
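            # ``test_program`` was cloned before the extra fc layer and the
            # optimizer ops are added below, so it contains only a subset of
            # the saved variables; loading into it exercises the
            # partial-restore path.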

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                test_program, os.path.join(temp_dir.name, "test_1.pdopt"), None
            )

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            fluid.load(
                test_program,
                os.path.join(temp_dir.name, "test_1.pdmodel"),
                None,
            )
            temp_dir.cleanup()


class TestSaveLoadSetStateDict(unittest.TestCase):
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(main_program, os.path.join(temp_dir.name, "test_1"), exe)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestProgramStatePartial(unittest.TestCase):
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, 'test_1'))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # fluid.load(test_program, "./test_1", None )
            program_state = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1')
            )

            program_state_1 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdparams')
            )

            program_state_2 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdopt')
            )

            program_state_3 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdmodel')
            )

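            # ``load_program_state`` accepts the bare prefix as well as the
            # suffixed file names; each state below is pushed back into
            # ``test_program`` with ``set_program_state`` and checked against
            # ``base_map``.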
            fluid.set_program_state(test_program, program_state)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 1
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_1)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 2
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_2)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 3
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_3)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestVariableInit(unittest.TestCase):
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_variable_init(self):

        x = fluid.data(name="x", shape=[10, 10], dtype='float32')
        y = fluid.layers.fc(x, 10)
        z = fluid.layers.fc(y, 10)

        place = self.set_place()
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())

        temp_dir = tempfile.TemporaryDirectory()
        fluid.save(
            fluid.default_main_program(),
            os.path.join(temp_dir.name, "test_path"),
        )

        def set_var(var, ndarray):
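            # Copy ``ndarray`` into ``var``'s tensor in place on the tensor's
            # own device; a CUDA place must be rebuilt from the raw place's
            # device id before ``t.set`` can target it.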
            t = var.get_tensor()
            p = t._place()
            if p.is_cpu_place():
                place = paddle.fluid.CPUPlace()
            elif p.is_cuda_pinned_place():
                place = paddle.fluid.CUDAPinnedPlace()
            else:
                p = paddle.fluid.core.Place()
                p.set_place(t._place())
                place = paddle.fluid.CUDAPlace(p.gpu_device_id())

            t.set(ndarray, place)

        program = fluid.default_main_program()
        new_scope = fluid.core.Scope()

        place = self.set_place()
        exe = fluid.Executor(place)
        parameter_list = list(
            filter(fluid.io.is_parameter, program.list_vars())
        )

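        # The variables must exist in ``new_scope`` before their tensors can
        # be filled; ``_create_loaded_parameter`` allocates them there.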
        fluid.core._create_loaded_parameter(
            parameter_list, new_scope, exe._default_executor
        )
        parameter_file_name = os.path.join(temp_dir.name, "test_path.pdparams")
        with open(parameter_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in parameter_list:
            assert (
                v.name in load_dict
            ), "Can not find [{}] in model file [{}]".format(
                v.name, parameter_file_name
            )
            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        opt_list = list(
            filter(fluid.io.is_belong_to_optimizer, program.list_vars())
        )

        fluid.core._create_loaded_parameter(
            opt_list, new_scope, exe._default_executor
        )
        opt_file_name = os.path.join(temp_dir.name, "test_path.pdopt")
        with open(opt_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in opt_list:
            assert (
                v.name in load_dict
            ), "Can not find [{}] in model file [{}]".format(
                v.name, opt_file_name
            )

            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        base_map = {}
        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                t = np.array(
                    fluid.global_scope().find_var(var.name).get_tensor()
                )
                # make sure all the parameter and optimizer variables have been updated
                base_map[var.name] = t

        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(new_scope.find_var(var.name).get_tensor())
                base_t = base_map[var.name]

                np.testing.assert_array_equal(new_t, base_t)
        temp_dir.cleanup()


class TestLoadFromOldInterface(unittest.TestCase):
    def setUp(self):
        if os.path.exists("test_path.pdparams"):
            os.remove("test_path.pdparams")

        if os.path.exists("test_static_load_var_list.pdparams"):
            os.remove("test_static_load_var_list.pdparams")

        self.temp_dir = tempfile.TemporaryDirectory()

    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe, os.path.join(self.temp_dir.name, "test_path"), main_program
            )
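            # Without ``filename``, the old interface stores one file per
            # variable under the target directory; ``fluid.load`` below reads
            # that layout back.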

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program, os.path.join(self.temp_dir.name, "test_path"), exe
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)
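            # A shape mismatch between the program and the checkpoint must
            # surface as a RuntimeError.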
            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    os.path.join(self.temp_dir.name, "test_path"),
                    exe,
                )

            # check unused parameter

            fluid.load(
                test_clone_program,
                os.path.join(self.temp_dir.name, "test_path"),
                exe,
            )

    def test_load_from_old_interface_var_list(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                main_program,
            )

            # set var to zero
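            # Keep every other persistable variable in ``var_list``; only
            # those are passed to ``fluid.load`` below and should be restored,
            # while the rest must stay zero.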
            var_list = []
            for i, var in enumerate(main_program.list_vars()):
                if isinstance(var, framework.Parameter) or var.persistable:
                    if i % 2 == 0:
                        var_list.append(var)
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer variables have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                exe,
                var_list,
            )
            var_list_names = [var.name for var in var_list]
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    if var.name in var_list_names:
                        # loaded vars
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)
                    else:
                        # not loaded vars
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)


class TestLoadFromOldInterfaceSingleFile(unittest.TestCase):
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            save_dir = os.path.join(temp_dir.name, "test_path")
            # fluid.save(main_program, "./test_1")
            paddle.distributed.io.save_persistables(
                exe, save_dir, main_program, filename="model_single"
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            file_model_path = os.path.join(save_dir, "model_single")
            fluid.load(
                main_program,
                file_model_path,
                exe,
                fluid.io.get_program_persistable_vars(main_program),
            )

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # test exception: enlarge every variable's shape so that the
            # following load raises a shape-mismatch RuntimeError
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)

            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # overwrite the saved file with a params-only file; loading all
            # persistables from it below is expected to fail as well
            fluid.io.save_params(
                exe, save_dir, main_program, filename="model_single"
            )
            with self.assertRaises(RuntimeError):
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # check when executor is None
            with self.assertRaises(ValueError):
                fluid.load(
                    main_program,
                    file_model_path,
                    None,
                    fluid.io.get_program_persistable_vars(main_program),
                )

            # check when var list is None
            with self.assertRaises(ValueError):
                fluid.load(main_program, file_model_path, exe, None)

            # check that load fails when var_list contains a variable that is
            # not present in the saved file
            with self.assertRaises(RuntimeError):
                temp_var = framework.Variable(
                    main_program.global_block(), shape=[1], name="test_temp_var"
                )
                all_var_list = list(main_program.list_vars())
                fluid.load(
                    main_program,
                    file_model_path,
                    exe,
                    all_var_list + [temp_var],
                )
        temp_dir.cleanup()


class TestProgramStateOldSave(unittest.TestCase):
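    # Round-trips program state through fluid.load_program_state /
    # fluid.set_program_state for a directory written by save_persistables,
    # and checks that the state can also be read back under dygraph.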
    def setUp(self):
        self.test_dygraph = True
        self.temp_dir = tempfile.TemporaryDirectory()

    def tearDown(self):
        self.temp_dir.cleanup()

    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

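            # note: this extra fc layer is appended after the test-program
            # clone; presumably it exercises a parameter that is saved and
            # restored but never updated by the optimizer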
            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            save_dir = os.path.join(self.temp_dir.name, "test_program_1")
            paddle.distributed.io.save_persistables(exe, save_dir, main_program)

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # case 1: load basic
            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 2: load with an unneeded file present in the save directory
            def symlink_force(target, link_name):
                try:
                    self.create_symlink(target, link_name)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        os.remove(link_name)
                        self.create_symlink(target, link_name)
                    else:
                        raise e

            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 3: load with var_list
            program_state = fluid.load_program_state(
                save_dir, main_program.all_parameters()
            )
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

        if self.test_dygraph:
            # make sure `load_program_state` can be used in dynamic graph mode
            with fluid.dygraph.guard(place):
                load_state = fluid.load_program_state(save_dir)
                for k, v in load_state.items():
                    np.testing.assert_array_equal(base_map[k], v)

    def create_symlink(self, target, link_name):
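        # fall back to the Win32 API through ctypes on platforms where
        # os.symlink is unavailable (accessing it raises AttributeError)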
        try:
            os.symlink(target, link_name)
        except AttributeError:
            import ctypes

            kernel_dll = ctypes.windll.LoadLibrary("kernel32.dll")
            kernel_dll.CreateSymbolicLinkA(target, link_name, 0)

    def check_in_static(self, main_program, base_map):
        for var in main_program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(
                    fluid.global_scope().find_var(var.name).get_tensor()
                )
                base_t = base_map[var.name]
                np.testing.assert_array_equal(new_t, base_t)


class TestProgramStateOldSaveSingleModel(unittest.TestCase):
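    # Same program-state round trip, but with every persistable saved into a
    # single file, plus the load_program_state error paths (missing var_list,
    # wrongly typed var_list elements, and an unknown variable).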
    def set_place(self):
        return (
            fluid.CPUPlace()
            if not core.is_compiled_with_cuda()
            else fluid.CUDAPlace(0)
        )

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel(
                "ptb_model",
                hidden_size=hidden_size,
                vocab_size=vocab_size,
                num_layers=num_layers,
                num_steps=num_steps,
                init_scale=init_scale,
            )

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(
                name="x", shape=[-1, num_steps], dtype='int64'
            )
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(
                name="init_hidden", shape=[1], dtype='float32'
            )
            init_cell = fluid.layers.data(
                name="init_cell", shape=[1], dtype='float32'
            )

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell
            )

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(
                static_last_hidden,
                size=hidden_size,
                num_flatten_dims=2,
                bias_attr=False,
            )

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                init_cell_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32'
                )
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(
                    fluid.default_main_program(),
                    feed={
                        "x": x_data,
                        "y": y_data,
                        "init_hidden": init_hidden_data,
                        "init_cell": init_cell_data,
                    },
                    fetch_list=fetch_list,
                )
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            save_dir = os.path.join(temp_dir.name, "test_program_2")
            paddle.distributed.io.save_persistables(
                exe, save_dir, main_program, filename="model_1"
            )

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # fluid.load(test_program, "./test_1", None )
            program_state = fluid.load_program_state(
                os.path.join(save_dir, "model_1"),
                var_list=fluid.io.get_program_persistable_vars(main_program),
            )
            fluid.set_program_state(main_program, program_state)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            with self.assertRaises(ValueError):
                fluid.load_program_state(os.path.join(save_dir, "model_1"))

            with self.assertRaises(TypeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"), var_list=["str"]
                )

            with self.assertRaises(RuntimeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"),
                    var_list=[
                        main_program.global_block().create_var(
                            name="fake_var_name", persistable=True
                        )
                    ],
                )
        temp_dir.cleanup()


class TestStaticSaveLoadPickle(unittest.TestCase):
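    # paddle.fluid.save must reject invalid pickle protocols (a non-int, <2,
    # or >4) and round-trip parameters correctly under each supported one.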
    def test_pickle_protocol(self):
        # enable static mode
        paddle.enable_static()

        with new_program_scope():
            # create network
            x = paddle.static.data(
                name="static_save_load_large_x",
                shape=[None, 10],
                dtype='float32',
            )
            z = paddle.static.nn.fc(x, 10, bias_attr=False)
            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            exe.run(paddle.static.default_startup_program())
            prog = paddle.static.default_main_program()

            base_map = {}
            for var in prog.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(
                        fluid.global_scope().find_var(var.name).get_tensor()
                    )
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            temp_dir = tempfile.TemporaryDirectory()
            path = os.path.join(
                temp_dir.name, "test_static_save_load_pickle", "pickle_protocol"
            )

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 2.0)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 1)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 5)

            protocols = [
                2,
            ]
            if sys.version_info >= (3, 4):
                protocols += [3, 4]
            for protocol in protocols:
                paddle.fluid.save(prog, path, protocol)
                # set var to zero
                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        ten = (
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        ten.set(np.zeros_like(np.array(ten)), place)

                        new_t = np.array(
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)

                paddle.fluid.load(prog, path)

                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        new_t = np.array(
                            fluid.global_scope().find_var(var.name).get_tensor()
                        )
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)


class TestSaveLoadInferenceModel(unittest.TestCase):
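    # save_inference_model / load_inference_model round trip for a program
    # that has no parameters at all.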
    def setUp(self):
        self.temp_dir = tempfile.TemporaryDirectory()
        self.model_path = os.path.join(self.temp_dir.name, 'no_params')

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_no_params(self):
        main_program = framework.Program()
        with framework.program_guard(main_program):
            x = paddle.static.data(name="x", shape=[10, 10], dtype='float32')
            y = x + x

            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)

            paddle.static.save_inference_model(self.model_path, [x], [y], exe)

            [
                inference_program,
                feed_target_names,
                fetch_targets,
            ] = paddle.static.load_inference_model(self.model_path, exe)

            self.assertEqual(feed_target_names, ['x'])
            self.assertEqual(fetch_targets[0].shape, (10, 10))
            ops = [op.type for op in inference_program.block(0).ops]
            self.assertEqual(ops, ['feed', 'elementwise_add', 'scale', 'fetch'])


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()