#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
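
# Unit tests for PaddlePaddle's static-graph save/load APIs: fluid.save /
# fluid.load, the legacy fluid.io.save_persistables interface,
# fluid.load_program_state / fluid.set_program_state, and low-level variable
# re-creation via core._create_loaded_parameter. A minimal sketch of the
# round trip exercised throughout this file:
#
#     fluid.save(main_program, "prefix")       # writes prefix.pdparams/.pdopt/.pdmodel
#     fluid.load(main_program, "prefix", exe)  # restores persistable vars into the scope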

from __future__ import print_function
import sys

import unittest
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.nn import Embedding
import paddle.fluid.framework as framework
from paddle.fluid.optimizer import Adam
from paddle.fluid.dygraph.base import to_variable
from test_imperative_base import new_program_scope
from paddle.fluid.executor import global_scope
import numpy as np
import six
import pickle
import os
import errno
import tempfile

paddle.enable_static()


class SimpleLSTMRNN(fluid.Layer):
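    """Stacked LSTM built from elementary fluid ops so that every gate
    weight and bias is an explicit, individually registered parameter."""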

    def __init__(self,
                 name_scope,
                 hidden_size,
                 num_steps,
                 num_layers=2,
                 init_scale=0.1,
                 dropout=None):
        super(SimpleLSTMRNN, self).__init__()
        self._hidden_size = hidden_size
        self._num_layers = num_layers
        self._init_scale = init_scale
        self._dropout = dropout
        self._input = None
        self._num_steps = num_steps
        self.cell_array = []
        self.hidden_array = []

        self.weight_1_arr = []
        self.weight_2_arr = []
        self.bias_arr = []
        self.mask_array = []

        for i in range(self._num_layers):
            weight_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale)),
                shape=[self._hidden_size * 2, self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.UniformInitializer(
                    low=-self._init_scale, high=self._init_scale))
            self.weight_1_arr.append(self.add_parameter('w_%d' % i, weight_1))
            bias_1 = self.create_parameter(
                attr=fluid.ParamAttr(
                    initializer=fluid.initializer.UniformInitializer(
                        low=-self._init_scale, high=self._init_scale)),
                shape=[self._hidden_size * 4],
                dtype="float32",
                default_initializer=fluid.initializer.Constant(0.0))
            self.bias_arr.append(self.add_parameter('b_%d' % i, bias_1))

    def forward(self, input_embedding, init_hidden=None, init_cell=None):
        self.cell_array = []
        self.hidden_array = []

        for i in range(self._num_layers):
            pre_hidden = fluid.layers.slice(init_hidden,
                                            axes=[0],
                                            starts=[i],
                                            ends=[i + 1])
            pre_cell = fluid.layers.slice(init_cell,
                                          axes=[0],
                                          starts=[i],
                                          ends=[i + 1])
            pre_hidden = fluid.layers.reshape(pre_hidden,
                                              shape=[-1, self._hidden_size])
            pre_cell = fluid.layers.reshape(pre_cell,
                                            shape=[-1, self._hidden_size])
            self.hidden_array.append(pre_hidden)
            self.cell_array.append(pre_cell)

        res = []
        for index in range(self._num_steps):
            self._input = fluid.layers.slice(input_embedding,
                                             axes=[1],
                                             starts=[index],
                                             ends=[index + 1])
            self._input = fluid.layers.reshape(self._input,
                                               shape=[-1, self._hidden_size])
            for k in range(self._num_layers):
                pre_hidden = self.hidden_array[k]
                pre_cell = self.cell_array[k]
                weight_1 = self.weight_1_arr[k]
                bias = self.bias_arr[k]

                nn = fluid.layers.concat([self._input, pre_hidden], 1)
                gate_input = fluid.layers.matmul(x=nn, y=weight_1)

                gate_input = fluid.layers.elementwise_add(gate_input, bias)
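                # i, j, f, o are the input gate, cell candidate, forget gate
                # and output gate pre-activations of a standard LSTM cell.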
                i, j, f, o = fluid.layers.split(gate_input,
                                                num_or_sections=4,
                                                dim=-1)
                c = pre_cell * fluid.layers.sigmoid(f) + fluid.layers.sigmoid(
                    i) * fluid.layers.tanh(j)
                m = fluid.layers.tanh(c) * fluid.layers.sigmoid(o)
                self.hidden_array[k] = m
                self.cell_array[k] = c
                self._input = m

                if self._dropout is not None and self._dropout > 0.0:
                    self._input = fluid.layers.dropout(
                        self._input,
                        dropout_prob=self._dropout,
                        dropout_implementation='upscale_in_train')
            res.append(
                fluid.layers.reshape(self._input,
                                     shape=[1, -1, self._hidden_size]))
        real_res = fluid.layers.concat(res, 0)
        real_res = fluid.layers.transpose(x=real_res, perm=[1, 0, 2])
        last_hidden = fluid.layers.concat(self.hidden_array, 1)
        last_hidden = fluid.layers.reshape(
            last_hidden, shape=[-1, self._num_layers, self._hidden_size])
        last_hidden = fluid.layers.transpose(x=last_hidden, perm=[1, 0, 2])
        last_cell = fluid.layers.concat(self.cell_array, 1)
        last_cell = fluid.layers.reshape(
            last_cell, shape=[-1, self._num_layers, self._hidden_size])
        last_cell = fluid.layers.transpose(x=last_cell, perm=[1, 0, 2])
        return real_res, last_hidden, last_cell


class PtbModel(fluid.Layer):
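    """PTB-style word-level language model: embedding -> stacked LSTM ->
    softmax projection, returning the mean cross-entropy loss."""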

    def __init__(self,
                 name_scope,
                 hidden_size,
                 vocab_size,
                 num_layers=2,
                 num_steps=20,
                 init_scale=0.1,
                 dropout=None):
        super(PtbModel, self).__init__()
        self.hidden_size = hidden_size
        self.vocab_size = vocab_size
        self.init_scale = init_scale
        self.num_layers = num_layers
        self.num_steps = num_steps
        self.dropout = dropout
        self.simple_lstm_rnn = SimpleLSTMRNN(self.full_name(),
                                             hidden_size,
                                             num_steps,
                                             num_layers=num_layers,
                                             init_scale=init_scale,
                                             dropout=dropout)
        self.embedding = paddle.nn.Embedding(
            num_embeddings=vocab_size,
            embedding_dim=hidden_size,
            weight_attr=fluid.ParamAttr(
                name='embedding_para',
                initializer=fluid.initializer.UniformInitializer(
                    low=-init_scale, high=init_scale)))
        self.softmax_weight = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.hidden_size, self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale))
        self.softmax_bias = self.create_parameter(
            attr=fluid.ParamAttr(),
            shape=[self.vocab_size],
            dtype="float32",
            default_initializer=fluid.initializer.UniformInitializer(
                low=-self.init_scale, high=self.init_scale))

    def forward(self, input, label, init_hidden, init_cell):
        init_h = fluid.layers.reshape(
            init_hidden, shape=[self.num_layers, -1, self.hidden_size])

        init_c = fluid.layers.reshape(
            init_cell, shape=[self.num_layers, -1, self.hidden_size])

        # The NPU 'top_k' kernel only supports `int32`, so cast `input` from `int64` to `int32`.
        input = fluid.layers.cast(input, "int32")
        x_emb = self.embedding(input)
        x_emb = fluid.layers.reshape(
            x_emb, shape=[-1, self.num_steps, self.hidden_size])
        if self.dropout is not None and self.dropout > 0.0:
            x_emb = fluid.layers.dropout(
                x_emb,
                dropout_prob=self.dropout,
                dropout_implementation='upscale_in_train')
        rnn_out, last_hidden, last_cell = self.simple_lstm_rnn(
            x_emb, init_h, init_c)

        rnn_out = fluid.layers.reshape(
            rnn_out, shape=[-1, self.num_steps, self.hidden_size])
        projection = fluid.layers.matmul(rnn_out, self.softmax_weight)
        projection = fluid.layers.elementwise_add(projection, self.softmax_bias)
        projection = fluid.layers.reshape(projection,
                                          shape=[-1, self.vocab_size])
        loss = fluid.layers.softmax_with_cross_entropy(logits=projection,
                                                       label=label,
                                                       soft_label=False)
        loss = fluid.layers.reshape(loss, shape=[-1, self.num_steps])
        loss = fluid.layers.reduce_mean(loss, dim=[0])
        loss = fluid.layers.reduce_sum(loss)

        return loss, last_hidden, last_cell


class TestSaveLoadBase(unittest.TestCase):
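    """fluid.save/fluid.load round trip: train a few batches, snapshot all
    persistable variables, zero them, reload, and compare against the
    snapshot."""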

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(main_program,
                       os.path.join(temp_dir.name, "test_1.pdparams"), exe)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestSaveLoadPartial(unittest.TestCase):
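    """Round trip that restores into a for-test clone holding only a subset
    of the trained program's variables, loading via the suffixed
    .pdopt/.pdmodel paths."""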

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(static_last_hidden,
                                    size=hidden_size,
                                    num_flatten_dims=2,
                                    bias_attr=False)

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(test_program, os.path.join(temp_dir.name,
                                                  "test_1.pdopt"), None)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            fluid.load(test_program,
                       os.path.join(temp_dir.name, "test_1.pdmodel"), None)
            temp_dir.cleanup()


class TestSaveLoadSetStateDict(unittest.TestCase):
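    """fluid.save/fluid.load round trip addressed by the bare checkpoint
    prefix (no .pdparams/.pdopt suffix)."""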

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, "test_1"))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(main_program, os.path.join(temp_dir.name, "test_1"), exe)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestProgramStatePartial(unittest.TestCase):
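    """Exercises fluid.load_program_state/fluid.set_program_state, loading
    the state via the bare prefix and via each of the .pdparams, .pdopt and
    .pdmodel file names."""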

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(static_last_hidden,
                                    size=hidden_size,
                                    num_flatten_dims=2,
                                    bias_attr=False)

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            fluid.save(main_program, os.path.join(temp_dir.name, 'test_1'))

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            #fluid.load(test_program, "./test_1", None )
            program_state = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1'))

            program_state_1 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdparams'))

            program_state_2 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdopt'))

            program_state_3 = fluid.load_program_state(
                os.path.join(temp_dir.name, 'test_1.pdmodel'))

            fluid.set_program_state(test_program, program_state)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 1
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_1)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 2
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_2)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # check 3
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.set_program_state(test_program, program_state_3)

            for var in test_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)
            temp_dir.cleanup()


class TestVariableInit(unittest.TestCase):
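    """Rebuilds saved variables in a fresh scope from the raw .pdparams and
    .pdopt pickles via core._create_loaded_parameter, then verifies the
    restored values."""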

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def test_variable_init(self):

        x = fluid.data(name="x", shape=[10, 10], dtype='float32')
        y = fluid.layers.fc(x, 10)
        z = fluid.layers.fc(y, 10)

        place = self.set_place()
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())

        temp_dir = tempfile.TemporaryDirectory()
        fluid.save(fluid.default_main_program(),
                   os.path.join(temp_dir.name, "test_path"))

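        # Copy an ndarray into a variable's tensor, preserving the tensor's
        # original device placement (CPU, CUDA-pinned, or CUDA).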
        def set_var(var, ndarray):
            t = var.get_tensor()
            p = t._place()
            if p.is_cpu_place():
                place = paddle.fluid.CPUPlace()
            elif p.is_cuda_pinned_place():
                place = paddle.fluid.CUDAPinnedPlace()
            else:
                p = paddle.fluid.core.Place()
                p.set_place(t._place())
                place = paddle.fluid.CUDAPlace(p.gpu_device_id())

            t.set(ndarray, place)

        program = fluid.default_main_program()
        new_scope = fluid.core.Scope()

        place = self.set_place()
        exe = fluid.Executor(place)
        parameter_list = list(filter(fluid.io.is_parameter,
                                     program.list_vars()))

        fluid.core._create_loaded_parameter(parameter_list, new_scope,
                                            exe._default_executor)
        parameter_file_name = os.path.join(temp_dir.name, "test_path.pdparams")
        with open(parameter_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in parameter_list:
            assert v.name in load_dict, \
                "Cannot find [{}] in model file [{}]".format(
                    v.name, parameter_file_name)
            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        opt_list = list(
            filter(fluid.io.is_belong_to_optimizer, program.list_vars()))

        fluid.core._create_loaded_parameter(opt_list, new_scope,
                                            exe._default_executor)
        opt_file_name = os.path.join(temp_dir.name, "test_path.pdopt")
        with open(opt_file_name, 'rb') as f:
            load_dict = pickle.load(f)

        for v in opt_list:
            assert v.name in load_dict, \
                "Cannot find [{}] in model file [{}]".format(
                    v.name, opt_file_name)

            new_v = new_scope.find_var(v.name)
            set_var(new_v, load_dict[v.name])

        base_map = {}
        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                t = np.array(fluid.global_scope().find_var(
                    var.name).get_tensor())
                # make sure all the parameter and optimizer vars have been updated
                base_map[var.name] = t

        for var in program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(new_scope.find_var(var.name).get_tensor())
                base_t = base_map[var.name]

                np.testing.assert_array_equal(new_t, base_t)
        temp_dir.cleanup()


class TestLoadFromOldInterface(unittest.TestCase):
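    """Loads checkpoints written by the legacy fluid.io.save_persistables
    interface, including partial loading through an explicit var_list and
    the shape-mismatch error path."""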

    def setUp(self):
        if os.path.exists("test_path.pdparams"):
            os.remove("test_path.pdparams")

        if os.path.exists("test_static_load_var_list.pdparams"):
            os.remove("test_static_load_var_list.pdparams")

        self.temp_dir = tempfile.TemporaryDirectory()

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def tearDown(self):
        self.temp_dir.cleanup()

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            #fluid.save(main_program, "./test_1")
            fluid.io.save_persistables(
                exe, os.path.join(self.temp_dir.name, "test_path"),
                main_program)

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(main_program,
                       os.path.join(self.temp_dir.name, "test_path"), exe)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

                    var.desc.set_shape(new_shape)
            with self.assertRaises(RuntimeError):
                fluid.load(main_program,
                           os.path.join(self.temp_dir.name, "test_path"), exe)

            # check unused parameter

            fluid.load(test_clone_program,
                       os.path.join(self.temp_dir.name, "test_path"), exe)

    def test_load_from_old_interface_var_list(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)

            test_clone_program = fluid.default_main_program().clone()
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            #fluid.save(main_program, "./test_1")
            fluid.io.save_persistables(
                exe,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                main_program)

            # set var to zero
            var_list = []
            for i, var in enumerate(main_program.list_vars()):
                if isinstance(var, framework.Parameter) or var.persistable:
                    if i % 2 == 0:
                        var_list.append(var)
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure all the parameter and optimizer vars have been set to zero
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            fluid.load(
                main_program,
                os.path.join(self.temp_dir.name, "test_static_load_var_list"),
                exe, var_list)
            var_list_names = [var.name for var in var_list]
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    if var.name in var_list_names:
                        # loaded vars
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)
                    else:
                        # vars not in var_list were zeroed and should stay zero
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)


class TestLoadFromOldInterfaceSingleFile(unittest.TestCase):
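    """Legacy-interface round trip with every persistable variable saved
    into a single combined file (filename="model_single")."""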

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def test_load_from_old_interface(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)
            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure every parameter and optimizer variable has been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
            save_dir = os.path.join(temp_dir.name, "test_path")
            # save all persistable vars (parameters + optimizer state) into one file
            fluid.io.save_persistables(exe,
                                       save_dir,
                                       main_program,
                                       filename="model_single")

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure every parameter and optimizer variable has been zeroed
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            file_model_path = os.path.join(save_dir, "model_single")
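            # reload the single-file checkpoint and verify each persistable
            # variable comes back with its pre-save value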
            fluid.load(main_program, file_model_path, exe,
                       fluid.io.get_program_persistable_vars(main_program))

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

            # test exception: enlarge every var's declared shape so the next
            # load fails with a shape mismatch
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    old_shape = np.array(ten).shape
                    new_shape = [e + 10 for e in old_shape]

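                    # only the declared shape in the program desc changes;
                    # the tensor data in the scope stays untouched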
                    var.desc.set_shape(new_shape)

            with self.assertRaises(RuntimeError):
                fluid.load(main_program, file_model_path, exe,
                           fluid.io.get_program_persistable_vars(main_program))

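            # overwrite the checkpoint with a params-only save; loading the
            # full persistable list from it must then fail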
            fluid.io.save_params(exe,
                                 save_dir,
                                 main_program,
                                 filename="model_single")
            with self.assertRaises(RuntimeError):
                fluid.load(main_program, file_model_path, exe,
                           fluid.io.get_program_persistable_vars(main_program))

            # check when executor is None
            with self.assertRaises(ValueError):
                fluid.load(main_program, file_model_path, None,
                           fluid.io.get_program_persistable_vars(main_program))

            # check when var list is None
            with self.assertRaises(ValueError):
                fluid.load(main_program, file_model_path, exe, None)

            # loading a var_list that contains a variable absent from the file must fail
            with self.assertRaises(RuntimeError):
                temp_var = framework.Variable(main_program.global_block(),
                                              shape=[1],
                                              name="test_temp_var")
                all_var_list = list(main_program.list_vars())
                fluid.load(main_program, file_model_path, exe,
                           all_var_list + [temp_var])
        temp_dir.cleanup()


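# Checks fluid.load_program_state / fluid.set_program_state round-trips
# against a directory checkpoint written by fluid.io.save_persistables, and
# that the loaded state dict is also usable under fluid.dygraph.guard.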
class TestProgramStateOldSave(unittest.TestCase):

    def setUp(self):
        self.test_dygraph = True
        self.temp_dir = tempfile.TemporaryDirectory()

    def tearDown(self):
        self.temp_dir.cleanup()

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(static_last_hidden,
                                    size=hidden_size,
                                    num_flatten_dims=2,
                                    bias_attr=False)

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure every parameter and optimizer variable has been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t
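            # directory-form save: each persistable variable gets its own file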
            save_dir = os.path.join(self.temp_dir.name, "test_program_1")
            fluid.io.save_persistables(exe, save_dir, main_program)

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure every parameter and optimizer variable has been zeroed
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # case 1: plain load of the whole program state directory
            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 2: loading still succeeds when the save directory holds a needless file
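            # helper that force-creates a symlink, replacing a stale one if it
            # already exists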
            def symlink_force(target, link_name):
                try:
                    self.create_symlink(target, link_name)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        os.remove(link_name)
                        self.create_symlink(target, link_name)
                    else:
                        raise

            program_state = fluid.load_program_state(save_dir)
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

            # case 3: load with var_list
            program_state = fluid.load_program_state(
                save_dir, main_program.all_parameters())
            fluid.set_program_state(main_program, program_state)
            self.check_in_static(main_program, base_map)

        if self.test_dygraph:
            # make sure `load_program_state` can be used in dynamic graph mode
            with fluid.dygraph.guard(place):
                load_state = fluid.load_program_state(save_dir)
                for k, v in load_state.items():
                    np.testing.assert_array_equal(base_map[k], v)

    def create_symlink(self, target, link_name):
        try:
            os.symlink(target, link_name)
        except AttributeError:
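            # os.symlink may be unavailable on Windows builds; fall back to
            # the Win32 CreateSymbolicLinkA API through ctypes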
            import ctypes
            kernel_dll = ctypes.windll.LoadLibrary("kernel32.dll")
            kernel_dll.CreateSymbolicLinkA(target, link_name, 0)

    def check_in_static(self, main_program, base_map):
        for var in main_program.list_vars():
            if isinstance(var, framework.Parameter) or var.persistable:
                new_t = np.array(fluid.global_scope().find_var(
                    var.name).get_tensor())
                base_t = base_map[var.name]
                np.testing.assert_array_equal(new_t, base_t)


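# Same round-trip as above, but against a checkpoint saved as one single file
# (filename="model_1"), which requires passing var_list explicitly.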
class TestProgramStateOldSaveSingleModel(unittest.TestCase):

    def set_place(self):
        return fluid.CPUPlace(
        ) if not core.is_compiled_with_cuda() else fluid.CUDAPlace(0)

    def test_ptb_rnn_cpu_float32(self):
        seed = 90
        hidden_size = 10
        vocab_size = 1000
        num_layers = 1
        num_steps = 3
        init_scale = 0.1
        batch_size = 4
        batch_num = 200
        temp_dir = tempfile.TemporaryDirectory()

        with new_program_scope():
            fluid.default_startup_program().random_seed = seed
            fluid.default_main_program().random_seed = seed
            ptb_model = PtbModel("ptb_model",
                                 hidden_size=hidden_size,
                                 vocab_size=vocab_size,
                                 num_layers=num_layers,
                                 num_steps=num_steps,
                                 init_scale=init_scale)

            place = self.set_place()
            exe = fluid.Executor(place)
            sgd = Adam(learning_rate=1e-3)
            x = fluid.layers.data(name="x",
                                  shape=[-1, num_steps],
                                  dtype='int64')
            y = fluid.layers.data(name="y", shape=[-1, 1], dtype='float32')
            init_hidden = fluid.layers.data(name="init_hidden",
                                            shape=[1],
                                            dtype='float32')
            init_cell = fluid.layers.data(name="init_cell",
                                          shape=[1],
                                          dtype='float32')

            static_loss, static_last_hidden, static_last_cell = ptb_model(
                x, y, init_hidden, init_cell)

            test_program = fluid.default_main_program().clone(for_test=True)

            add_1 = fluid.layers.fc(static_last_hidden,
                                    size=hidden_size,
                                    num_flatten_dims=2,
                                    bias_attr=False)

            sgd.minimize(static_loss)
            static_param_updated = dict()
            static_param_init = dict()

            out = exe.run(framework.default_startup_program())

            static_loss_value = None
            static_last_cell_value = None
            static_last_hidden_value = None
            for i in range(batch_num):
                x_data = np.arange(12).reshape(4, 3).astype('int64')
                y_data = np.arange(1, 13).reshape(4, 3).astype('int64')
                x_data = x_data.reshape((-1, num_steps, 1))
                y_data = y_data.reshape((-1, 1))
                init_hidden_data = np.zeros(
                    (num_layers, batch_size, hidden_size), dtype='float32')
                init_cell_data = np.zeros((num_layers, batch_size, hidden_size),
                                          dtype='float32')
                fetch_list = [static_loss, static_last_hidden, static_last_cell]
                out = exe.run(fluid.default_main_program(),
                              feed={
                                  "x": x_data,
                                  "y": y_data,
                                  "init_hidden": init_hidden_data,
                                  "init_cell": init_cell_data
                              },
                              fetch_list=fetch_list)
                static_loss_value = out[0]
                static_last_hidden_value = out[1]
                static_last_cell_value = out[2]

            # get value before save
            main_program = framework.default_main_program()
            base_map = {}
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure every parameter and optimizer variable has been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            save_dir = os.path.join(temp_dir.name, "test_program_2")
            fluid.io.save_persistables(exe,
                                       save_dir,
                                       main_program,
                                       filename="model_1")

            # set var to zero
            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    ten = fluid.global_scope().find_var(var.name).get_tensor()
                    ten.set(np.zeros_like(np.array(ten)), place)

                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure every parameter and optimizer variable has been zeroed
                    self.assertTrue(np.sum(np.abs(new_t)) == 0)

            # reload from the single-file checkpoint, restricted to the persistable vars
            program_state = fluid.load_program_state(
                os.path.join(save_dir, "model_1"),
                var_list=fluid.io.get_program_persistable_vars(main_program))
            fluid.set_program_state(main_program, program_state)

            for var in main_program.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    new_t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    base_t = base_map[var.name]
                    np.testing.assert_array_equal(new_t, base_t)

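            # a single-file checkpoint cannot be loaded without an explicit var_list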
            with self.assertRaises(ValueError):
                fluid.load_program_state(os.path.join(save_dir, "model_1"))

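            # var_list entries must be Variables, not plain strings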
            with self.assertRaises(TypeError):
                fluid.load_program_state(os.path.join(save_dir, "model_1"),
                                         var_list=["str"])

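            # requesting a variable that is absent from the checkpoint must fail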
            with self.assertRaises(RuntimeError):
                fluid.load_program_state(
                    os.path.join(save_dir, "model_1"),
                    var_list=[
                        main_program.global_block().create_var(
                            name="fake_var_name", persistable=True)
                    ])
        temp_dir.cleanup()


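# Checks the pickle_protocol argument of paddle.fluid.save: invalid values
# are rejected and every supported protocol round-trips the parameters.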
class TestStaticSaveLoadPickle(unittest.TestCase):

    def test_pickle_protocol(self):
        # enable static mode
        paddle.enable_static()

        with new_program_scope():
            # create network
            x = paddle.static.data(name="static_save_load_large_x",
                                   shape=[None, 10],
                                   dtype='float32')
            z = paddle.static.nn.fc(x, 10, bias_attr=False)
            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            exe.run(paddle.static.default_startup_program())
            prog = paddle.static.default_main_program()

            base_map = {}
            for var in prog.list_vars():
                if isinstance(var, framework.Parameter) or var.persistable:
                    t = np.array(fluid.global_scope().find_var(
                        var.name).get_tensor())
                    # make sure every parameter and optimizer variable has been updated
                    self.assertTrue(np.sum(np.abs(t)) != 0)
                    base_map[var.name] = t

            temp_dir = tempfile.TemporaryDirectory()
            path = os.path.join(temp_dir.name, "test_static_save_load_pickle",
                                "pickle_protocol")

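            # pickle_protocol must be an integer in [2, 4]; floats and
            # out-of-range integers raise ValueError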
            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 2.0)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 1)

            with self.assertRaises(ValueError):
                paddle.fluid.save(prog, path, 5)

            protocols = [2]
            if sys.version_info >= (3, 4):
                protocols += [3, 4]
            for protocol in protocols:
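                # save with this protocol, zero out every variable, reload,
                # and compare against the snapshot taken before saving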
                paddle.fluid.save(prog, path, protocol)
                # set var to zero
                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        ten = fluid.global_scope().find_var(
                            var.name).get_tensor()
                        ten.set(np.zeros_like(np.array(ten)), place)

                        new_t = np.array(fluid.global_scope().find_var(
                            var.name).get_tensor())
                        self.assertTrue(np.sum(np.abs(new_t)) == 0)

                paddle.fluid.load(prog, path)

                for var in prog.list_vars():
                    if isinstance(var, framework.Parameter) or var.persistable:
                        new_t = np.array(fluid.global_scope().find_var(
                            var.name).get_tensor())
                        base_t = base_map[var.name]
                        np.testing.assert_array_equal(new_t, base_t)


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()