# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import os
import collections
import pickle
import warnings
import sys
import numpy as np
import copyreg
import paddle

# deprecated module import
from paddle import fluid
from paddle.fluid import core
from paddle.fluid.io import _unpack_saved_dict, _pack_loaded_dict, _pickle_loads_mac
from paddle.fluid.io import _legacy_save as _legacy_static_save
from paddle.fluid.io import _open_file_buffer, _is_file_path, _is_memory_buffer

from paddle.fluid.framework import Variable, _varbase_creator, _dygraph_tracer, in_dygraph_mode, ParamBase, _current_expected_place, Program
from paddle.fluid.dygraph.jit import _SaveLoadConfig
from paddle.fluid.dygraph.io import _construct_program_holders, _construct_params_and_buffers
from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX, INFER_PARAMS_INFO_SUFFIX

__all__ = []


def _build_saved_state_dict(state_dict):
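    # Convert a state_dict whose values are Variable/VarBase into a picklable
    # dict of numpy arrays, and record the structured-name -> parameter-name
    # mapping under the "StructuredToParameterName@@" key.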
    save_dict = {}
    name_table = {}
    for key, value in state_dict.items():
        if isinstance(value, (Variable, core.VarBase)):
            save_dict[key] = value.numpy()
            name_table[key] = value.name
        else:
            save_dict[key] = value
    save_dict["StructuredToParameterName@@"] = name_table

    return save_dict


def _load_state_dict_from_save_inference_model(model_path, config):
    # 1. load program desc & construct _ProgramHolder
    programs = _construct_program_holders(model_path, config.model_filename)

    # 2. load layer parameters & buffers
    with fluid.dygraph.guard():
        persistable_var_dict = _construct_params_and_buffers(
            model_path, programs, config.params_filename, append_suffix=False)

        # 3. construct state_dict
        load_param_dict = dict()
        for var_name in persistable_var_dict:
            load_param_dict[var_name] = persistable_var_dict[var_name].numpy()

        # if *.info exists, we can recover structured_name
        var_info_filename = str(config.params_filename) + ".info"
        var_info_path = os.path.join(model_path, var_info_filename)
        if os.path.exists(var_info_path):
            with open(var_info_path, 'rb') as f:
                extra_var_info = pickle.load(f)
            structured_para_dict = dict()
            for var_name in load_param_dict:
                structured_name = extra_var_info[var_name].get(
                    'structured_name', None)
                assert structured_name is not None, "Cannot find saved variable (%s)'s structured name in saved model." % var_name
                structured_para_dict[structured_name] = load_param_dict[
                    var_name]
            load_param_dict = structured_para_dict

    return load_param_dict


def _load_state_dict_from_save_params(model_path):
    # Try to load all the files in the directory as VarBase;
    # the file name is used as the name of the VarBase
    load_var_list = []

    # 1. load file names
    var_name_list = []
    for root, _, files in os.walk(model_path):
        for filename in files:
            file_path = os.path.join(root, filename)
            tmp_var_name = os.path.relpath(file_path, model_path)
            var_name = tmp_var_name.replace("\\", "/")
            var_name_list.append(var_name)

    # 2. create and load VarBase
    with fluid.dygraph.guard():
        for name in var_name_list:
            new_var = _varbase_creator(name=name, persistable=True)
            _dygraph_tracer().trace_op(
                type='load',
                inputs={},
                outputs={'Out': new_var},
                attrs={'file_path': os.path.join(model_path, name)})
            load_var_list.append(new_var)

    # 3. construct state_dict
    load_param_dict = dict()
    for var in load_var_list:
        load_param_dict[var.name] = var.numpy()

    return load_param_dict


# NOTE(chenweihang): [ Handling of use cases of API paddle.load ]
# `paddle.load` may be used to load saved results of:
# 1. Expected cases:
#   - need [full filename] when loading
#       - paddle.save
#       - paddle.static.save
#       - paddle.fluid.save_dygraph
#   - need [prefix] when loading [compatible for paddle 2.x]
#       - paddle.jit.save
#       - paddle.static.save_inference_model
#   - need [directory] when loading [compatible for paddle 1.x]
#       - paddle.fluid.io.save_inference_model
#       - paddle.fluid.io.save_params/save_persistables
# 2. Error cases:
#   - no error case
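# For example (file names below are illustrative only):
#   - paddle.load('emb.pdparams') loads a full file name saved by paddle.save/paddle.static.save
#   - paddle.load('model/mnist') loads the mnist.pdmodel/mnist.pdiparams prefix format saved by paddle.jit.save
#   - paddle.load('saved_dir') loads the old directory format of fluid.io.save_inference_model/save_params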
def _build_load_path_and_config(path, config):
    # NOTE(chenweihang): If both [prefix save format] and [directory save format] exist,
    # raise error, avoid confusing behavior
    prefix_format_path = path + INFER_MODEL_SUFFIX
    prefix_format_exist = os.path.exists(prefix_format_path)
    directory_format_exist = os.path.isdir(path)
    if prefix_format_exist and directory_format_exist:
        raise ValueError(
            "The %s.pdmodel and %s directory exist at the same time, "
            "don't know which one to load, please make sure that the specified target "
            "of ``path`` is unique." % (path, path))
    elif not prefix_format_exist and not directory_format_exist:
        error_msg = "The ``path`` (%s) to load model does not exist."
        # if the current path is a prefix and path.pdparams or path.pdopt
        # exists, users may want to use `paddle.load` to load the result of
        # `fluid.save_dygraph`, so we raise an error here to remind them
        params_file_path = path + ".pdparams"
        opti_file_path = path + ".pdopt"
        if os.path.exists(params_file_path) or os.path.exists(opti_file_path):
            error_msg += " If you want to load the results saved by `fluid.save_dygraph`, " \
                "please specify the full file name, not just the file name prefix. For " \
                "example, it should be written as `paddle.load('model.pdparams')` instead of " \
                "`paddle.load('model')`."
        raise ValueError(error_msg % path)
    else:
        if prefix_format_exist:
            file_prefix = os.path.basename(path)
            model_path = os.path.dirname(path)
            if config.model_filename is not None:
                warnings.warn(
                    "When loading the result saved with the "
                    "specified file prefix, the ``model_filename`` config does "
                    "not take effect.")
            config.model_filename = file_prefix + INFER_MODEL_SUFFIX
            if config.params_filename is not None:
                warnings.warn(
                    "When loading the result saved with the "
                    "specified file prefix, the ``params_filename`` config does "
                    "not take effect.")
            config.params_filename = file_prefix + INFER_PARAMS_SUFFIX
        else:
            # Compatible with the old save_inference_model format
            model_path = path

    return model_path, config


def _parse_load_config(configs):
    supported_configs = [
        'model_filename', 'params_filename', 'keep_name_table', 'return_numpy'
    ]

    # input check
    for key in configs:
        if key not in supported_configs:
            raise ValueError(
                "The additional config (%s) of `paddle.load` is not supported."
                % key)

    # construct inner config
    inner_config = _SaveLoadConfig()
    inner_config.model_filename = configs.get('model_filename', None)
    inner_config.params_filename = configs.get('params_filename', None)
    inner_config.keep_name_table = configs.get('keep_name_table', None)
    inner_config.return_numpy = configs.get('return_numpy', False)

    return inner_config


def _parse_save_config(configs):
    supported_configs = ['use_binary_format', 'pickle_protocol']

    # input check
    for key in configs:
        if key not in supported_configs:
            raise ValueError(
                "The additional config (%s) of `paddle.save` is not supported."
                % key)

    # construct inner config
    inner_config = _SaveLoadConfig()
    inner_config.use_binary_format = configs.get('use_binary_format', False)
    inner_config.pickle_protocol = configs.get('pickle_protocol', None)

    return inner_config


def _pickle_save(obj, f, protocol):
    # TODO(weixin):add support for BytesIO.
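    # Pickle `obj` into the writable object `f`. Paddle tensors
    # (VarBase/ParamBase/LoDTensor) are converted to picklable forms through a
    # custom dispatch table so that `paddle.load` can restore them later.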
    if not isinstance(protocol, int):
        raise ValueError("The 'protocol' MUST be `int`, but received {}".format(
            type(protocol)))

    if protocol < 2 or protocol > 4:
        raise ValueError("Expected 1<'protocol'<5, but received protocol={}".
                         format(protocol))

    def reduce_varbase(self):
        data = self.numpy()
        name = self.name

        return (tuple, ((name, data), ))

    def reduce_LoDTensor(self):
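        # A LoDTensor is serialized as a bare ndarray; on unpickling,
        # eval('data', {'data': data}) simply returns that ndarray.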
        data = np.array(self)

        return (eval, ('data', {'data': data}))

    def reduce_Layer(self):
        raise ValueError(
            "paddle does not support saving `paddle.nn.Layer` object.")

    dispatch_table_layer = dict()

    def create_layer_dispatch_table(layer):
        dispatch_table_layer[layer.__class__] = reduce_Layer
        return layer

    _parse_every_object(obj, lambda v: isinstance(v, core.Layer),
                        create_layer_dispatch_table)

    def add_dispatch_table():
        # This is not a good approach, because it modifies the global state of
        # the pickle module.
        pickle.dispatch_table[core.VarBase] = reduce_varbase
        pickle.dispatch_table[ParamBase] = reduce_varbase
        pickle.dispatch_table[core.LoDTensor] = reduce_LoDTensor
        pickle.dispatch_table.update(dispatch_table_layer)

    def pop_dispatch_table():
        pickle.dispatch_table.pop(core.VarBase)
        pickle.dispatch_table.pop(core.LoDTensor)
        pickle.dispatch_table.pop(ParamBase)
267 268
        for k in dispatch_table_layer:
            pickle.dispatch_table.pop(k)

    # When a value in the dict is larger than 4GB, there is a bug in pickle on macOS Python 3
    if sys.platform == 'darwin' and sys.version_info.major == 3:
        add_dispatch_table()
        pickle_bytes = pickle.dumps(obj)
        pop_dispatch_table()

        max_bytes = 2**30
        for i in range(0, len(pickle_bytes), max_bytes):
            f.write(pickle_bytes[i:i + max_bytes])
    else:
        pickler = pickle.Pickler(f, protocol)
        pickler.dispatch_table = copyreg.dispatch_table.copy()

        pickler.dispatch_table[core.VarBase] = reduce_varbase
        pickler.dispatch_table[core.LoDTensor] = reduce_LoDTensor
        pickler.dispatch_table[ParamBase] = reduce_varbase
        pickler.dispatch_table.update(dispatch_table_layer)
        pickler.dump(obj)


def _contain_x(obj, condition_func):
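    # Recursively check whether any element nested inside `obj`
    # (dict/OrderedDict/list/tuple) satisfies `condition_func`.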
    if isinstance(obj, core.SelectedRows):
        raise NotImplementedError(
            "`paddle.save` does not support saving 'SelectedRows'.")

    if condition_func(obj):
        return True
    elif type(obj) in (dict, collections.OrderedDict, list, tuple):
        if type(obj) in (dict, collections.OrderedDict):
            keys = list(obj.keys())
        else:
            keys = range(len(obj))
        flag = False
        for key in keys:
            flag |= _contain_x(obj[key], condition_func)
            if flag:
                return True
        return flag
    else:
        return False


def _is_state_dict(obj):
    if isinstance(obj, dict):

        def condition(obj):
            return isinstance(obj, (core.Layer, Program, core.VarBase,
                                    core.LoDTensor, core.SelectedRows))

        # If the value of a dict is a core.VarBase/LoDTensor, or a dict
        # that does not contain a paddle type (Layer, Program, VarBase, LoDTensor, SelectedRows),
        # the dict is considered to be a state_dict.
        for key, value in obj.items():
            if isinstance(value, dict):
                for k, v in value.items():
                    if _contain_x(v, condition):
                        return False
            elif not isinstance(value, (core.VarBase, core.LoDTensor)):
                return False
        return True

    return False


def _transformed_from_varbase(obj):
    # In paddle2.1 version, VarBase is saved as tuple(tensor.name, tensor.numpy()).
    # When executing paddle.load, use this function to determine whether to restore to VarBase/LoDTensor.
    if isinstance(obj, tuple) and len(obj) == 2:
        name_types = str
        if isinstance(obj[0], name_types) and isinstance(obj[1], np.ndarray):
            return True
    return False


def _transformed_from_lodtensor(obj):
    # In paddle2.1 version, LoDTensor is saved as np.array(tensor).
    # When executing paddle.load, use this function to determine whether to restore to VarBase/LoDTensor.
    if isinstance(obj, np.ndarray):
        return True
    return False


def _to_LodTensor(ndarray):
    if not isinstance(ndarray, np.ndarray):
        raise TypeError(
            'Type of `ndarray` should be numpy.ndarray, but received {}.'.
            format(type(ndarray)))
    t = core.LoDTensor()
    place = _current_expected_place()
    t.set(ndarray, place)
    return t


def _tuple_to_tensor(obj, return_numpy):
    if return_numpy:
        return obj[1]
    if in_dygraph_mode():
        t = paddle.to_tensor(obj[1])
        # This function modifies the name of the returned value.
        # Loading the same variable multiple times may result in tensors with the same name.
        t.name = obj[0]
        return t
    else:
        return _to_LodTensor(obj[1])


def _ndarray_to_tensor(obj, return_numpy):
    if return_numpy:
        return obj
    if in_dygraph_mode():
        return paddle.to_tensor(obj)
    else:
        return _to_LodTensor(obj)


def _lod_tensor2varbase(tensor):
    return_var = _varbase_creator()
    return_var.value().get_tensor().set(tensor, _current_expected_place())
    return return_var


def _parse_every_object(obj, condition_func, convert_func):
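    # Recursively traverse the containers (dict/OrderedDict/list/tuple/set) in
    # `obj` and replace every element that satisfies `condition_func` with
    # `convert_func(element)`.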
    if condition_func(obj):
        return convert_func(obj)
    elif type(obj) in (dict, collections.OrderedDict, list):
        if type(obj) == list:
            keys = range(len(obj))
        else:
            keys = list(obj.keys())
        for key in keys:
            if condition_func(obj[key]):
                obj[key] = convert_func(obj[key])
            else:
                obj[key] = _parse_every_object(obj[key], condition_func,
                                               convert_func)
        return obj
    elif type(obj) == tuple:
        return tuple(
            _parse_every_object(list(obj), condition_func, convert_func))
    elif type(obj) == set:
        return set(_parse_every_object(list(obj), condition_func, convert_func))
    else:
        if isinstance(obj, collections.abc.Iterable) and not isinstance(obj, (
                str, np.ndarray, core.VarBase, core.LoDTensor)):
            raise NotImplementedError(
                "The iterable objects supported are tuple, list, dict, OrderedDict, string. But received {}.".
                format(type(obj)))
        return obj


def _parse_load_result(obj, return_numpy):
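    # Convert the raw objects produced by pickle.load back to tensors:
    # (name, ndarray) tuples come from VarBase saved by paddle 2.1, while bare
    # ndarrays come from LoDTensor or from results saved by paddle 2.0.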
    def is_layer(obj):
        return isinstance(obj, core.Layer)

    def parse_layer(obj):
        temp_dict = _parse_load_result(obj.__dict__, False)
        obj.__dict__.update(temp_dict)
        return obj

    if _contain_x(obj, is_layer):
        if not in_dygraph_mode():
            raise ValueError(
                "Layer can only be loaded in dynamic graph mode, but now in static graph mode."
            )

        _parse_every_object(obj, is_layer, parse_layer)

    def tuple_to_tensor(obj):
        return _tuple_to_tensor(obj, return_numpy=return_numpy)

    def ndarray_to_tensor(obj):
        return _ndarray_to_tensor(obj, return_numpy=return_numpy)

    # tuple(name, ndarray) was converted from a varbase of paddle 2.1,
    # and all tuple(name, ndarray) are converted to tensor.
    if _contain_x(obj, _transformed_from_varbase):
        return _parse_every_object(obj, _transformed_from_varbase,
                                   tuple_to_tensor)
    # If there is no tuple(name, ndarray), it is considered to be saved by paddle 2.0
    # or converted from LoDTensor, and all ndarrays are converted to tensor.
    else:
        return _parse_every_object(obj, _transformed_from_lodtensor,
                                   ndarray_to_tensor)


def _save_lod_tensor(tensor, file_name):
    if not tensor._is_initialized():
        raise ValueError("The saved tensor is not initialized.")
    if _is_file_path(file_name):
        _seek = core.save_lod_tensor(tensor, file_name)
        # '_seek' is the end position of this tensor in the file.

    elif _is_memory_buffer(file_name):
        tensor_bytes = core.save_lod_tensor_to_memory(tensor)

        with _open_file_buffer(file_name, 'wb') as f:
            f.write(tensor_bytes)
            _seek = f.tell()

    else:
        raise NotImplementedError(
            'Only supports saving objects to file or BytesIO, but received {}'.
            format(type(file_name)))
    return _seek


def _load_lod_tensor(file_name):
    temp_t = paddle.fluid.core.LoDTensor()
    if _is_file_path(file_name):
        # '_seek' is the end position of this tensor in the file.
        _seek = paddle.fluid.core.load_lod_tensor(temp_t, file_name)

    elif _is_memory_buffer(file_name):
        with _open_file_buffer(file_name, 'rb') as f:
            tensor_bytes = f.read()
            paddle.fluid.core.load_lod_tensor_from_memory(temp_t, tensor_bytes)
            _seek = f.tell()

    else:
        raise NotImplementedError(
            'Only supports load objects from file or BytesIO, but received {}'.
            format(type(file_name)))

    return temp_t, _seek


def _save_selected_rows(selected_rows, file_name):
    if not selected_rows.get_tensor()._is_initialized():
        raise ValueError("The saved tensor is not initialized.")
    if _is_file_path(file_name):
        # '_seek' is the end position of this SelectedRows in the file.
        _seek = core.save_selected_rows(selected_rows, file_name)

    elif _is_memory_buffer(file_name):
        selected_rows_bytes = core.save_selected_rows_to_memory(selected_rows)
        with _open_file_buffer(file_name, 'wb') as f:
            f.write(selected_rows_bytes)
            _seek = f.tell()
    else:
        raise NotImplementedError(
            'Only supports saving objects to file or BytesIO, but received {}'.
            format(type(file_name)))
    return _seek


def _load_selected_rows(file_name):
    temp_sr = core.SelectedRows()
    if _is_file_path(file_name):
        # '_seek' is the end position of this SelectedRows in the file.
        _seek = core.load_selected_rows(temp_sr, file_name)

    elif _is_memory_buffer(file_name):
        with _open_file_buffer(file_name, 'rb') as f:
            selected_rows_bytes = f.read()
            paddle.fluid.core.load_selected_rows_from_memory(
                temp_sr, selected_rows_bytes)
        _seek = f.tell()

    else:
        raise NotImplementedError(
            'Only supports load objects from file or BytesIO, but received {}'.
            format(type(file_name)))

    return temp_sr, _seek


def _save_binary_var(obj, path):
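    # Save a single static graph variable (LoDTensor/SelectedRows/VarBase)
    # in the C++ binary format.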
    if isinstance(obj, core.LoDTensor):
        _save_lod_tensor(obj, path)
    elif isinstance(obj, core.SelectedRows):
        _save_selected_rows(obj, path)
    elif isinstance(obj, core.VarBase):
        _save_lod_tensor(obj.value().get_tensor(), path)
    else:
        # Since only the concept of 'Tensor' is exposed to users, the error message
        # mentions 'Tensor' instead of 'LoDTensor' or 'SelectedRows'
        raise NotImplementedError(
            "When use_binary_format = True, `paddle.save` expected Tensor, but received {}.".
            format(type(obj)))


def save(obj, path, protocol=4, **configs):
    '''
    Save an object to the specified path.

    .. note::
        Now supports saving the ``state_dict`` of Layer/Optimizer, Tensor, nested structures containing Tensor, and Program.

    .. note::
        Different from ``paddle.jit.save``, since the save result of ``paddle.save`` is a single file, 
        there is no need to distinguish multiple saved files by adding a suffix. The argument ``path`` 
        of ``paddle.save`` will be directly used as the saved file name instead of a prefix. 
        In order to unify the saved file name format, we recommend using the paddle standard suffixes:
        1. for ``Layer.state_dict`` , we recommend using ``.pdparams`` ; 
        2. for ``Optimizer.state_dict`` , we recommend using ``.pdopt`` . 
        For specific examples, please refer to the API code examples.

    Args:
        obj(Object) : The object to be saved.
        path(str|BytesIO) : The path/buffer of the object to be saved. 
          If saved in the current directory, the input path string will be used as the file name. 
        protocol(int, optional): The protocol version of the pickle module; it must be greater than 1 and less than 5.
                                 Default: 4
        **configs(dict, optional): optional keyword arguments. The following options are currently supported:
          use_binary_format(bool): When the saved object is a static graph variable, you can specify ``use_binary_format``. 
          If True, save the file in the C++ binary format when saving a single static graph variable; otherwise, save it in pickle format.
          Default: False

    Returns:
        None

    Examples:
        .. code-block:: python

            # example 1: dynamic graph
            import paddle
            emb = paddle.nn.Embedding(10, 10)
            layer_state_dict = emb.state_dict()

            # save state_dict of emb
            paddle.save(layer_state_dict, "emb.pdparams")

            scheduler = paddle.optimizer.lr.NoamDecay(
                d_model=0.01, warmup_steps=100, verbose=True)
            adam = paddle.optimizer.Adam(
                learning_rate=scheduler,
                parameters=emb.parameters())
            opt_state_dict = adam.state_dict()

            # save state_dict of optimizer
            paddle.save(opt_state_dict, "adam.pdopt")
            # save weight of emb
            paddle.save(emb.weight, "emb.weight.pdtensor")

            # example 2: Save multiple state_dict at the same time
            from paddle import nn
            from paddle.optimizer import Adam

            layer = paddle.nn.Linear(3, 4)
            adam = Adam(learning_rate=0.001, parameters=layer.parameters())
            obj = {'model': layer.state_dict(), 'opt': adam.state_dict(), 'epoch': 100}
            path = 'example/model.pdparams'
            paddle.save(obj, path)


            # example 3: static graph
            import paddle
            import paddle.static as static

            paddle.enable_static()

            # create network
            x = paddle.static.data(name="x", shape=[None, 224], dtype='float32')
            z = paddle.static.nn.fc(x, 10)

            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            exe.run(paddle.static.default_startup_program())
            prog = paddle.static.default_main_program()
            for var in prog.list_vars():
                if list(var.shape) == [224, 10]:
                    tensor = var.get_value()
                    break

            # save/load tensor
            path_tensor = 'temp/tensor.pdtensor'
            paddle.save(tensor, path_tensor)

            # save/load state_dict
            path_state_dict = 'temp/model.pdparams'
            paddle.save(prog.state_dict("param"), path_state_dict)

            # example 4: save program
            import paddle

            paddle.enable_static()

            data = paddle.static.data(
                name='x_static_save', shape=(None, 224), dtype='float32')
            y_static = z = paddle.static.nn.fc(data, 10)
            main_program = paddle.static.default_main_program()
            path = "example/main_program.pdmodel"
            paddle.save(main_program, path)

            # example 5: save object to memory
            from io import BytesIO
            import paddle
            from paddle.nn import Linear
            paddle.disable_static()

            linear = Linear(5, 10)
            state_dict = linear.state_dict()
            byio = BytesIO()
            paddle.save(state_dict, byio)
            tensor = paddle.randn([2, 3], dtype='float32')
            paddle.save(tensor, byio)
    
    '''
    if _is_file_path(path):
        # 1. input check
        filename = os.path.basename(path)
        if filename == "":
            raise ValueError(
                "The input path MUST be format of dirname/filename "
                "[dirname\\filename in Windows system], but received "
                "filename is empty string.")

        # 2. save object
        dirname = os.path.dirname(path)
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
    elif not _is_memory_buffer(path):
        raise ValueError(
            "only supports saving objects to file and `BytesIO`, but got {}".
            format(type(path)))

    config = _parse_save_config(configs)

    if not isinstance(config.use_binary_format, bool):
        raise TypeError(
            "Type of `use_binary_format` should be bool, but received {}.".
            format(type(config.use_binary_format)))

    if config.use_binary_format:
        _save_binary_var(obj, path)
    else:
        # `protocol` should be used; `pickle_protocol` is a deprecated argument.
        if config.pickle_protocol is not None:
            protocol = config.pickle_protocol
            warnings.warn(
                "'pickle_protocol' is a deprecated argument. Please use 'protocol' instead."
            )

        if isinstance(obj, Program):
            obj.desc.flush()
            with _open_file_buffer(path, "wb") as f:
                f.write(obj.desc.serialize_to_string())

        elif _is_state_dict(obj):
            if in_dygraph_mode():
                _legacy_save(obj, path, protocol)
            else:
                _legacy_static_save(obj, path, protocol)
        else:
            with _open_file_buffer(path, 'wb') as f:
                _pickle_save(obj, f, protocol)


def _legacy_save(obj, path, protocol=2):
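    # Save a state_dict with plain pickle after converting its values to numpy
    # arrays; used by `paddle.save` when `obj` is a state_dict in dygraph mode.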
    # 1. input check
    if not isinstance(obj, dict):
        raise NotImplementedError(
            "Now only supports save state_dict of Layer or Optimizer, "
            "expect dict, but received %s." % type(obj))

    if len(obj) == 0:
        warnings.warn("The input state dict is empty, no need to save.")

    if not isinstance(protocol, int):
        raise ValueError("The 'protocol' MUST be `int`, but received {}".format(
            type(protocol)))

    if protocol < 2 or protocol > 4:
        raise ValueError("Expected 1<'protocol'<5, but received protocol={}".
                         format(protocol))

    if _is_file_path(path):
        filename = os.path.basename(path)
        if filename == "":
            raise ValueError(
                "The input path MUST be format of dirname/filename "
                "[dirname\\filename in Windows system], but received "
                "filename is empty string.")
        # 2. save object
        dirname = os.path.dirname(path)
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)

    if isinstance(obj, dict):
        saved_obj = _build_saved_state_dict(obj)

    saved_obj = _unpack_saved_dict(saved_obj, protocol)

    # When a value in the dict is larger than 4GB, there is a bug in pickle on macOS Python 3
    if _is_file_path(
            path) and sys.platform == 'darwin' and sys.version_info.major == 3:
        pickle_bytes = pickle.dumps(saved_obj, protocol=protocol)
        with open(path, 'wb') as f:
            max_bytes = 2**30
            for i in range(0, len(pickle_bytes), max_bytes):
                f.write(pickle_bytes[i:i + max_bytes])
    else:
        with _open_file_buffer(path, 'wb') as f:
            pickle.dump(saved_obj, f, protocol=protocol)


def load(path, **configs):
    '''
    Load an object that can be used in paddle from the specified path.

    .. note::
        Now supports loading ``state_dict`` of Layer/Optimizer, Tensor, nested structures containing Tensor, and Program.

    .. note::
        In order to use the model parameters saved by paddle more efficiently, 
        ``paddle.load`` supports loading ``state_dict`` of Layer from the result of 
        other save APIs except ``paddle.save`` , but the argument ``path`` format is 
        different:
        1. loading from ``paddle.static.save`` or ``paddle.Model().save(training=True)`` ,  
        ``path`` needs to be a complete file name, such as ``model.pdparams`` or 
        ``model.pdopt`` ; 
        2. loading from ``paddle.jit.save`` or ``paddle.static.save_inference_model`` 
        or ``paddle.Model().save(training=False)`` , ``path`` needs to be a file prefix, 
        such as ``model/mnist``, and ``paddle.load`` will get information from 
        ``mnist.pdmodel`` and ``mnist.pdiparams`` ;
        3. loading from paddle 1.x APIs ``paddle.fluid.io.save_inference_model`` or 
        ``paddle.fluid.io.save_params/save_persistables`` , ``path`` needs to be a 
        directory, such as ``model``, where ``model`` is a directory.

    .. note::
        If you load a ``state_dict`` from the saved result of a static mode API such as 
        ``paddle.static.save`` or ``paddle.static.save_inference_model`` , 
        the structured variable names in dynamic mode cannot be restored. 
        You need to set the argument ``use_structured_name=False`` when using 
        ``Layer.set_state_dict`` later.

    Args:
        path(str|BytesIO) : The path/buffer to load the target object. Generally, the path is the target 
            file path. When loading state_dict from the saved result of the API used to save 
            the inference model, the path may be a file prefix or directory.
        **configs (dict, optional): other load configuration options for compatibility. We do not 
            recommend using these configurations; they may be removed in the future. If not necessary, 
            DO NOT use them. Default None.
            The following options are currently supported:
            (1) model_filename (str): The inference model file name of the paddle 1.x 
            ``save_inference_model`` save format. The default file name is :code:`__model__` . 
            (2) params_filename (str): The persistable variables file name of the paddle 1.x 
            ``save_inference_model`` save format. There is no default file name; variables are 
            saved separately by default.
            (3) return_numpy(bool): If specified as True, return tensor as numpy.ndarray, otherwise return tensor as paddle.Tensor. 
            Default False.

    Returns:
        Object(Object): a target object can be used in paddle

    Examples:
        .. code-block:: python

            # example 1: dynamic graph
            import paddle
            emb = paddle.nn.Embedding(10, 10)
            layer_state_dict = emb.state_dict()

            # save state_dict of emb
            paddle.save(layer_state_dict, "emb.pdparams")

            scheduler = paddle.optimizer.lr.NoamDecay(
                d_model=0.01, warmup_steps=100, verbose=True)
            adam = paddle.optimizer.Adam(
                learning_rate=scheduler,
                parameters=emb.parameters())
            opt_state_dict = adam.state_dict()

            # save state_dict of optimizer
            paddle.save(opt_state_dict, "adam.pdopt")
            # save weight of emb
            paddle.save(emb.weight, "emb.weight.pdtensor")

            # load state_dict of emb
            load_layer_state_dict = paddle.load("emb.pdparams")
            # load state_dict of optimizer
            load_opt_state_dict = paddle.load("adam.pdopt")
            # load weight of emb
            load_weight = paddle.load("emb.weight.pdtensor")


            # example 2: Load multiple state_dict at the same time
            from paddle import nn
            from paddle.optimizer import Adam

            layer = paddle.nn.Linear(3, 4)
            adam = Adam(learning_rate=0.001, parameters=layer.parameters())
            obj = {'model': layer.state_dict(), 'opt': adam.state_dict(), 'epoch': 100}
            path = 'example/model.pdparams'
            paddle.save(obj, path)
            obj_load = paddle.load(path)


            # example 3: static graph
            import paddle
            import paddle.static as static

            paddle.enable_static()

            # create network
            x = paddle.static.data(name="x", shape=[None, 224], dtype='float32')
            z = paddle.static.nn.fc(x, 10)

            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            exe.run(paddle.static.default_startup_program())
            prog = paddle.static.default_main_program()
            for var in prog.list_vars():
                if list(var.shape) == [224, 10]:
                    tensor = var.get_value()
                    break

            # save/load tensor
            path_tensor = 'temp/tensor.pdtensor'
            paddle.save(tensor, path_tensor)
            load_tensor = paddle.load(path_tensor)

            # save/load state_dict
            path_state_dict = 'temp/model.pdparams'
            paddle.save(prog.state_dict("param"), path_state_dict)
            load_state_dict = paddle.load(path_state_dict)


            # example 4: load program
            import paddle

            paddle.enable_static()

            data = paddle.static.data(
                name='x_static_save', shape=(None, 224), dtype='float32')
            y_static = z = paddle.static.nn.fc(data, 10)
            main_program = paddle.static.default_main_program()
            path = "example/main_program.pdmodel"
            paddle.save(main_program, path)
            load_main = paddle.load(path)
            print(load_main)


            # example 5: save object to memory
            from io import BytesIO
            import paddle
            from paddle.nn import Linear
            paddle.disable_static()

            linear = Linear(5, 10)
            state_dict = linear.state_dict()
            byio = BytesIO()
            paddle.save(state_dict, byio)
            tensor = paddle.randn([2, 3], dtype='float32')
            paddle.save(tensor, byio)
            byio.seek(0)
            # load state_dict
            dict_load = paddle.load(byio)

    '''

    if _is_memory_buffer(path) or os.path.isfile(path):
        config = _parse_load_config(configs)
        exception_type = pickle.UnpicklingError
        try:
            with _open_file_buffer(path, 'rb') as f:
                # When a value in the dict is larger than 4GB, there is a bug in pickle on macOS Python 3
                if _is_file_path(
                        path
                ) and sys.platform == 'darwin' and sys.version_info.major == 3:
                    load_result = _pickle_loads_mac(path, f)
                else:
                    load_result = pickle.load(f, encoding='latin1')

                # TODO(weixin):If `obj` is any object, the judgment condition should be more precise.
                if isinstance(load_result, dict):
                    load_result = _pack_loaded_dict(load_result)
                    # paddle2.0: paddle.save/load
                    if "StructuredToParameterName@@" in load_result:

                        for key in load_result["StructuredToParameterName@@"]:
                            load_result[key] = _ndarray_to_tensor(
                                load_result[key], config.return_numpy)

                        if not config.keep_name_table and "StructuredToParameterName@@" in load_result:
                            del load_result["StructuredToParameterName@@"]
                    else:
                        # paddle2.1 static.save/load
                        load_result = _parse_load_result(load_result,
                                                         config.return_numpy)

                else:
                    load_result = _parse_load_result(load_result,
                                                     config.return_numpy)

        except exception_type as msg_pickle:
            try:
                tensor, _ = _load_selected_rows(path)
                return tensor
            except:
                try:
                    tensor, _ = _load_lod_tensor(path)
                    if config.return_numpy:
                        return np.array(tensor)
                    else:
                        if in_dygraph_mode():
                            return _lod_tensor2varbase(tensor)
                        return tensor
                except:
                    try:
                        with _open_file_buffer(path, "rb") as f:
                            program_desc_str = f.read()
                            program = Program.parse_from_string(
                                program_desc_str)
                            return program
                    except:
                        raise ValueError(
                            "`paddle.load` can not parse the file:{}.".format(
                                path))

    else:
        load_result = _legacy_load(path, **configs)

    return load_result


def _legacy_load(path, **configs):
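    # Load results saved in the older formats: a single pickle file created by
    # `paddle.save`, or the prefix/directory formats created by `paddle.jit.save`,
    # `save_inference_model` and `save_params`/`save_persistables`.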
    load_result = None
    config = _parse_load_config(configs)

    if os.path.isfile(path) or _is_memory_buffer(path):
        # if path is a file, we assume it was created by paddle.save
        with _open_file_buffer(path, 'rb') as f:
            load_result = pickle.load(f, encoding='latin1')
        load_result = _pack_loaded_dict(load_result)
        if not config.keep_name_table and "StructuredToParameterName@@" in load_result:
            del load_result["StructuredToParameterName@@"]
    else:
        # file prefix and directory are compatible cases
        model_path, config = _build_load_path_and_config(path, config)
        # check whether model file exists
        if config.model_filename is None:
            model_filename = '__model__'
        else:
            model_filename = config.model_filename
        model_file_path = os.path.join(model_path, model_filename)

        if os.path.exists(model_file_path):
            # Load state dict by `jit.save/io.save_inference_model` save format
            # NOTE(chenweihang): [ Compatibility of save_inference_model save format ]
            # The model saved by `save_inference_model` does not completely correspond to 
            # the information required by the `state_dict` under the dygraph. 
            # `save_inference_model` does not save structured names, so we need to remind 
            # the user to configure the `use_structured_name` argument when calling `set_state_dict`
            # NOTE(chenweihang): `jit.save` doesn't save optimizer state 
            load_result = _load_state_dict_from_save_inference_model(model_path,
                                                                     config)
        else:
            # load state dict by `io.save_params/persistables` save format
            # TODO(chenweihang): [ Now only supports loading parameters separately ]
            # If users save all parameters as one file, the [ variable.name -> variable ]
            # mapping info will be lost, so users need to give a variable list, but building 
            # the variable list in dygraph mode is difficult; we recommend users to use
            # paddle.static.load_program_state in this case
            load_result = _load_state_dict_from_save_params(model_path)

    return load_result