io.py 68.2 KB
Newer Older
1
#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
D
dzhwinter 已提交
2
#
D
dzhwinter 已提交
3 4 5
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
D
dzhwinter 已提交
6
#
D
dzhwinter 已提交
7
#     http://www.apache.org/licenses/LICENSE-2.0
D
dzhwinter 已提交
8
#
D
dzhwinter 已提交
9 10 11 12 13 14
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

15 16
from __future__ import print_function

17
import os
T
bug fix  
tangwei12 已提交
18
import errno
D
dzhwinter 已提交
19
import warnings
20
import six
21
import logging
H
hong 已提交
22
import pickle
23
from functools import reduce
24

H
hong 已提交
25 26
import numpy as np

27 28 29
import paddle
import paddle.reader
from paddle.reader import *
30
from paddle.fluid import layers
H
hong 已提交
31
from paddle.fluid.executor import Executor, global_scope
32
from paddle.fluid.evaluator import Evaluator
33
from paddle.fluid.framework import Program, Parameter, default_main_program, default_startup_program, Variable, program_guard
T
tangwei12 已提交
34
from paddle.fluid.compiler import CompiledProgram
35
from paddle.fluid.log_helper import get_logger
S
sneaxiy 已提交
36 37
from . import reader
from .reader import *
K
fix bug  
Kexin Zhao 已提交
38
from . import core
39
from .. import compat as cpt
40

41 42
batch = paddle.batch

43
__all__ = [
H
hong 已提交
44 45 46 47 48 49 50 51 52 53 54 55 56
    'save_vars',
    'save_params',
    'save_persistables',
    'load_vars',
    'load_params',
    'load_persistables',
    'save_inference_model',
    'load_inference_model',
    'batch',
    'save',
    'load',
    'load_program_state',
    'set_program_state',
57
] + reader.__all__ + paddle.reader.__all__
58

59 60
_logger = get_logger(
    __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s')
61

62 63

def is_parameter(var):
F
fengjiayi 已提交
64 65
    """
    Check whether the given variable is an instance of Parameter.
66 67

    Args:
F
fengjiayi 已提交
68
        var(Variable): The variable to be checked.
69 70

    Returns:
F
fengjiayi 已提交
71 72 73 74 75 76
        bool: True if the given `var` is an instance of Parameter,
        False if not.

    Examples:
        .. code-block:: python

77
            import paddle.fluid as fluid
F
fengjiayi 已提交
78 79
            param = fluid.default_main_program().global_block().var('fc.w')
            res = fluid.io.is_parameter(param)
80
    """
81 82 83 84
    return isinstance(var, Parameter)


def is_persistable(var):
F
fengjiayi 已提交
85 86 87 88 89 90 91 92 93 94 95 96 97
    """
    Check whether the given variable is persistable.

    Args:
        var(Variable): The variable to be checked.

    Returns:
        bool: True if the given `var` is persistable
        False if not.

    Examples:
        .. code-block:: python

98
            import paddle.fluid as fluid
99
            param = fluid.default_main_program().global_block().var('fc.b')
F
fengjiayi 已提交
100 101
            res = fluid.io.is_persistable(param)
    """
102
    if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
Y
yuyang18 已提交
103 104
            var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
            var.desc.type() == core.VarDesc.VarType.READER:
105
        return False
106 107 108
    return var.persistable


H
hong 已提交
109
def is_belong_to_optimizer(var):
H
hong 已提交
110 111 112 113
    if not isinstance(var, Parameter):
        return is_persistable(var)

    return False
H
hong 已提交
114 115


116 117
def _clone_var_in_block_(block, var):
    assert isinstance(var, Variable)
118 119 120 121 122 123 124 125 126 127 128 129 130 131 132
    if var.desc.type() == core.VarDesc.VarType.LOD_TENSOR:
        return block.create_var(
            name=var.name,
            shape=var.shape,
            dtype=var.dtype,
            type=var.type,
            lod_level=var.lod_level,
            persistable=True)
    else:
        return block.create_var(
            name=var.name,
            shape=var.shape,
            dtype=var.dtype,
            type=var.type,
            persistable=True)
133 134


C
chengduo 已提交
135 136 137 138 139 140 141 142 143 144 145 146 147 148
def _get_valid_program(main_program):
    if main_program is None:
        main_program = default_main_program()
    elif isinstance(main_program, CompiledProgram):
        main_program = main_program._program
        if main_program is None:
            raise TypeError("program should be as Program type or None")
        warnings.warn(
            "The input is a CompiledProgram, this is not recommended.")
    if not isinstance(main_program, Program):
        raise TypeError("program should be as Program type or None")
    return main_program


149 150 151 152 153
def save_vars(executor,
              dirname,
              main_program=None,
              vars=None,
              predicate=None,
154
              filename=None):
155
    """
156
    This API saves specific variables in the `Program` to files.
F
fengjiayi 已提交
157

158 159 160
    There are two ways to specify the variables to be saved: set variables in 
    a list and assign it to the `vars`, or use the `predicate` function to select
    variables that make `predicate(variable) == True`. The first way has a higher priority.
161

162 163 164
    The `dirname` is used to specify the folder where to save variables.
    If you prefer to save variables in separate files in the `dirname` floder,
    do not set `filename`. If you prefer to save all variables in a single file,
F
fengjiayi 已提交
165
    use `filename` to specify it.
166

F
fengjiayi 已提交
167 168
    Args:
        executor(Executor): The executor to run for saving variables.
169 170
        dirname(str): The folder where to save variables.
        main_program(Program, optional): The program whose variables will be saved.
171
                                    If it is None, the default main program will
F
fengjiayi 已提交
172 173
                                    be used automatically.
                                    Default: None
174 175 176 177 178 179 180 181
        vars(list[Variable], optional): The list contains all variables to be saved.
                                        Default: None
        predicate(function, optional): The function selects the variables that make
                                       `predicate(variable) == True`. 
                                       Default: None
        filename(str, optional): If you prefer to save all variables in a single file,
                                 use `filename` to specify it. Otherwise, let `filename` be None. 
                                 Default: None
F
fengjiayi 已提交
182 183 184 185 186 187 188 189 190 191

    Returns:
        None

    Raises:
        TypeError: If `main_program` is not an instance of Program nor None.

    Examples:
        .. code-block:: python

192
            import paddle.fluid as fluid
193

194 195 196 197 198 199 200 201 202 203 204
            main_prog = fluid.Program()
            startup_prog = fluid.Program()
            with fluid.program_guard(main_prog, startup_prog):
                data = fluid.layers.data(name="img", shape=[64, 784], append_batch_size=False)
                w = fluid.layers.create_parameter(shape=[784, 200], dtype='float32', name='fc_w')
                b = fluid.layers.create_parameter(shape=[200], dtype='float32', name='fc_b')
                hidden_w = fluid.layers.matmul(x=data, y=w)
                hidden_b = fluid.layers.elementwise_add(hidden_w, b)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(startup_prog)
F
fengjiayi 已提交
205

206
            # The first usage: use `vars` to set the saved variables.
207 208
            var_list = [w, b]
            path = "./my_paddle_vars"
209
            fluid.io.save_vars(executor=exe, dirname=path, vars=var_list,
210 211 212 213 214 215 216 217 218 219
                            filename="vars_file")
            # w and b will be save in a file named "var_file".

            # The second usage: use `predicate` to select the saved variable.
            def name_has_fc(var):
                res = "fc" in var.name
                return res
            param_path = "./my_paddle_model"
            fluid.io.save_vars(executor=exe, dirname=param_path, main_program=main_prog, vars=None, predicate = name_has_fc)
            # all variables whose names contain "fc " are saved.
220
    """
L
lujun 已提交
221
    save_dirname = os.path.normpath(dirname)
C
chengduo 已提交
222
    main_program = _get_valid_program(main_program)
T
tangwei12 已提交
223

224 225 226
    if vars is None:
        save_vars(
            executor,
227
            main_program=main_program,
L
lujun 已提交
228
            dirname=save_dirname,
229
            vars=list(filter(predicate, main_program.list_vars())),
230
            filename=filename)
231
    else:
232 233 234 235 236 237 238
        # give warning when there is no var in model
        if len(list(vars)) == 0:
            warnings.warn(
                "no variable in your model, please ensure there are any variables in your model to save"
            )
            return None

239 240
        save_program = Program()
        save_block = save_program.global_block()
241 242

        save_var_map = {}
243
        for each_var in vars:
244 245 246
            # NOTE: don't save the variable which type is RAW
            if each_var.type == core.VarDesc.VarType.RAW:
                continue
247
            new_var = _clone_var_in_block_(save_block, each_var)
248
            if filename is None:
249 250
                save_file_path = os.path.join(save_dirname, new_var.name)
                save_file_path = os.path.normpath(save_file_path)
251 252 253 254
                save_block.append_op(
                    type='save',
                    inputs={'X': [new_var]},
                    outputs={},
255
                    attrs={'file_path': save_file_path})
256 257 258
            else:
                save_var_map[new_var.name] = new_var

259
        if filename is not None:
260 261 262 263
            save_var_list = []
            for name in sorted(save_var_map.keys()):
                save_var_list.append(save_var_map[name])

264
            save_block.append_op(
265 266
                type='save_combine',
                inputs={'X': save_var_list},
267
                outputs={},
L
lujun 已提交
268
                attrs={'file_path': os.path.join(save_dirname, filename)})
269

270 271 272
        executor.run(save_program)


273
def save_params(executor, dirname, main_program=None, filename=None):
274
    """
275 276 277
    This operator saves all parameters from the :code:`main_program` to
    the folder :code:`dirname` or file :code:`filename`. You can refer to 
    :ref:`api_guide_model_save_reader_en` for more details.
F
fengjiayi 已提交
278

279 280 281
    Use the :code:`dirname` to specify the saving folder. If you would like to
    save parameters in separate files, set :code:`filename` None; if you would
    like to save all parameters in a single file, use :code:`filename` to specify
F
fengjiayi 已提交
282 283
    the file name.

284 285 286 287 288 289 290 291 292 293
    Note: 
        Some variables are not Parameter while they are necessary for
        training, such as learning rate, global step, etc. So you can NOT save 
        and continue your training just by :ref:`api_fluid_io_save_params`
        and :ref:`api_fluid_io_load_params`. Please use :ref:`api_fluid_io_save_persistables`
        and :ref:`api_fluid_io_load_persistables` instead. 
        
        If you want to save your model for the inference, please use the 
        :ref:`api_fluid_io_save_inference_model`. You can refer to
        :ref:`api_guide_model_save_reader_en` for more details.
F
fengjiayi 已提交
294 295

    Args:
296 297
        executor(Executor): The executor to run for saving parameters, You can 
                            refer to :ref:`api_guide_executor_en`.
F
fengjiayi 已提交
298
        dirname(str): The saving directory path.
299 300 301 302 303 304 305 306 307 308
        main_program(Program, optional): The program whose parameters will be
                                         saved. You can refer to 
                                         :ref:`api_guide_Program_en` for more 
                                         details. If it is None, the default main
                                         program will be used.
                                         Default: None
        filename(str, optional): The file to save all parameters. If you prefer
                                 to save parameters in different files, set it
                                 to None.
                                 Default: None
F
fengjiayi 已提交
309 310 311 312 313 314 315

    Returns:
        None

    Examples:
        .. code-block:: python

H
Huihuang Zheng 已提交
316
            import paddle.fluid as fluid
317 318 319 320 321 322 323 324 325 326
           
            params_path = "./my_paddle_model"
            image = fluid.data(name='img', shape=[None, 28, 28], dtype='float32')
            label = fluid.data(name='label', shape=[None, 1], dtype='int64')
            feeder = fluid.DataFeeder(feed_list=[image, label], place=fluid.CPUPlace())
            predict = fluid.layers.fc(input=image, size=10, act='softmax')
    
            loss = fluid.layers.cross_entropy(input=predict, label=label)
            avg_loss = fluid.layers.mean(loss)
            
F
fengjiayi 已提交
327
            exe = fluid.Executor(fluid.CPUPlace())
328 329 330 331
            exe.run(fluid.default_startup_program())
            fluid.io.save_params(executor=exe, dirname=params_path)
            # The parameters weights and bias of the fc layer in the network are going to 
            # be saved in different files in the path "./my_paddle_model" 
332 333 334 335
    """
    save_vars(
        executor,
        dirname=dirname,
336
        main_program=main_program,
337
        vars=None,
338
        predicate=is_parameter,
339
        filename=filename)
340 341


342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363
def _save_distributed_persistables(executor, dirname, main_program):
    """
    save_persistables for distributed training.
    the method will do things listed below:
    1.save part of persistable variables on trainer.
    2.receive "remote prefetch variables" from parameter servers and merge them.
    3.save "distributed lookup table" on parameter servers.
    4.receive "optimizer variables" from parameter servers and merge them.

    Args:
        executor(Executor): The executor to run for saving parameters.
        dirname(str): The saving directory path.
        main_program(Program): The program whose parameters will be
                            saved. the main_program must be the trainer_program
                            get after transpiler.

    Returns:
        None

    Examples:
        .. code-block:: python

364
            import paddle.fluid as fluid
365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488
            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            t = distribute_transpiler.DistributeTranspiler()
            t.transpile(...)
            train_program = t.get_trainer_program()
            _save_distributed_persistables(executor=exe, dirname=param_path, main_program=train_program)
    """

    def __save_remote_params(executor, dirname, remote_params_map):
        """
        recive params on pserver through rpc.
        if the params are be sliced, will concat them to one, then save it.
        """
        if not remote_params_map:
            return

        prog = Program()
        block = prog.global_block()

        # recv optimize vars from pserver
        for name, remote_params in remote_params_map.items():
            origin_var = None
            is_slice = False
            slice_vars = [0] * len(remote_params)
            slice_var_names = [""] * len(remote_params)
            endpoints = [""] * len(remote_params)

            for idx, optimizer in enumerate(remote_params):
                origin = optimizer.origin
                slice = optimizer.slice
                is_slice = optimizer.is_slice
                block_id = optimizer.block_id
                endpoint = optimizer.endpoint

                if idx == 0:
                    origin_var = block.create_var(
                        name=origin.name,
                        type=origin.type,
                        shape=origin.shape,
                        dtype=origin.dtype,
                        persistable=True)

                slice_var = block.create_var(
                    name="{}.slice.{}".format(slice.name, idx),
                    type=slice.type,
                    shape=slice.shape,
                    dtype=slice.dtype,
                    persistable=True)

                index = block_id if is_slice else idx
                slice_vars[index] = slice_var
                slice_var_names[index] = slice.name
                endpoints[index] = endpoint

            if is_slice:
                block.append_op(
                    type='recv',
                    inputs={"X": []},
                    outputs={"Out": slice_vars},
                    attrs={
                        "epmap": endpoints,
                        "with_barrier": False,
                        "varnames": slice_var_names,
                        "sync_mode": True
                    })
                block.append_op(
                    type='concat',
                    inputs={'X': slice_vars},
                    outputs={'Out': origin_var},
                    attrs={})
            else:
                block.append_op(
                    type='recv',
                    inputs={"X": []},
                    outputs={"Out": [origin_var]},
                    attrs={
                        "epmap": endpoints[:1],
                        "with_barrier": False,
                        "varnames": slice_var_names,
                        "sync_mode": True
                    })
            block.append_op(
                type='save',
                inputs={'X': [origin_var]},
                outputs={},
                attrs={'file_path': os.path.join(dirname, origin_var.name)})
            block.append_op(type='delete_var', inputs={'X': slice_vars})
        executor.run(prog)

    def __save_distributed_lookup_tables(executor, dirname,
                                         distributed_lookup_table, endpoints):
        """
        because the distributed lookup table may too huge to merge and save at one place,
        it will be saved at parameter server independent respectively.

        the save directory is dirname/"__lookup_table__".

        """
        prog = Program()
        block = prog.global_block()

        # if there is lookup table, the trainer 0 will notify all pserver to save.
        lookup_table_filename = os.path.join(dirname, "__lookup_table__")
        attrs = {}
        attrs['epmap'] = endpoints
        attrs['dir'] = lookup_table_filename
        attrs['lookup_table'] = distributed_lookup_table
        block.append_op(
            type='checkpoint_notify', inputs={}, outputs={}, attrs=attrs)
        executor.run(prog)

    def __exclude_vars(exclude_var_names=[]):
        def is_valid(var):
            if var.name in exclude_var_names:
                return False
            if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
                        var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
                        var.desc.type() == core.VarDesc.VarType.READER:
                return False
            return var.persistable

        return is_valid

    if not isinstance(main_program, Program):
T
tangwei12 已提交
489
        raise TypeError("'main_program' should be an instance of Program.")
490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522

    if not main_program._is_distributed:
        raise ValueError(
            "'_save_distributed_persistables' just be designed for distributed training."
        )

    remote_params_map = main_program._parameters_on_pservers.get_distributed_vars_by_vtypes(
        ["Optimizer", "RemotePrefetch"], groupby=True)

    exclude_var_names = []
    if remote_params_map:
        exclude_var_names.extend(remote_params_map.keys())

    if main_program._distributed_lookup_table:
        if isinstance(main_program._distributed_lookup_table, list):
            exclude_var_names.extend(main_program._distributed_lookup_table)
        else:
            exclude_var_names.append(main_program._distributed_lookup_table)

    local_vars = list(
        filter(__exclude_vars(exclude_var_names), main_program.list_vars()))
    save_vars(
        executor, main_program=main_program, dirname=dirname, vars=local_vars)

    if main_program._is_chief:
        if remote_params_map:
            __save_remote_params(executor, dirname, remote_params_map)
        if main_program._distributed_lookup_table:
            __save_distributed_lookup_tables(
                executor, dirname, main_program._distributed_lookup_table,
                main_program._endpoints)


523
def save_persistables(executor, dirname, main_program=None, filename=None):
524
    """
525 526 527 528 529
    This operator saves all persistable variables from :code:`main_program` to 
    the folder :code:`dirname` or file :code:`filename`. You can refer to 
    :ref:`api_guide_model_save_reader_en` for more details. And then
    saves these persistables variables to the folder :code:`dirname` or file 
    :code:`filename`. 
F
fengjiayi 已提交
530

531
    The :code:`dirname` is used to specify the folder where persistable variables
532
    are going to be saved. If you would like to save variables in separate
533 534
    files, set :code:`filename` None; if you would like to save all variables in a
    single file, use :code:`filename` to specify the file name.
F
fengjiayi 已提交
535 536 537

    Args:
        executor(Executor): The executor to run for saving persistable variables.
538 539 540 541 542 543 544 545 546 547 548 549
                            You can refer to :ref:`api_guide_executor_en` for 
                            more details.
        dirname(str): The saving directory path.
        main_program(Program, optional): The program whose persistbale variables will
                                         be saved. You can refer to 
                                         :ref:`api_guide_Program_en` for more details.
                                         If it is None, the default main program will 
                                         be used.
                                         Default: None.
        filename(str, optional): The file to save all variables. If you prefer to
                                 save variables in different files, set it to None.
                                 Default: None.
F
fengjiayi 已提交
550 551 552 553 554 555 556

    Returns:
        None

    Examples:
        .. code-block:: python

H
Huihuang Zheng 已提交
557
            import paddle.fluid as fluid
558 559 560 561 562 563 564 565 566 567
        
            dir_path = "./my_paddle_model"
            file_name = "persistables"
            image = fluid.data(name='img', shape=[None, 28, 28], dtype='float32')
            label = fluid.data(name='label', shape=[None, 1], dtype='int64')
            feeder = fluid.DataFeeder(feed_list=[image, label], place=fluid.CPUPlace())
           
            predict = fluid.layers.fc(input=image, size=10, act='softmax')
            loss = fluid.layers.cross_entropy(input=predict, label=label)
            avg_loss = fluid.layers.mean(loss)
F
fengjiayi 已提交
568
            exe = fluid.Executor(fluid.CPUPlace())
569 570 571 572 573
            exe.run(fluid.default_startup_program())
            fluid.io.save_persistables(executor=exe, dirname=dir_path, filename=file_name)
            # The persistables variables weights and bias in the fc layer of the network 
            # are going to be saved in the same file named "persistables" in the path
            # "./my_paddle_model"
574
    """
575 576 577 578 579 580 581 582 583 584 585
    if main_program and main_program._is_distributed:
        _save_distributed_persistables(
            executor, dirname=dirname, main_program=main_program)
    else:
        save_vars(
            executor,
            dirname=dirname,
            main_program=main_program,
            vars=None,
            predicate=is_persistable,
            filename=filename)
586 587


588 589 590 591 592
def load_vars(executor,
              dirname,
              main_program=None,
              vars=None,
              predicate=None,
593
              filename=None):
594
    """
595
    This API loads variables from files by executor.
F
fengjiayi 已提交
596

597 598 599 600
    There are two ways to specify the variables to be loaded: the first way, set
    variables in a list and assign it to the `vars`; the second way, use the 
    `predicate` function to select variables that make `predicate(variable) == True`. 
    The first way has a higher priority.
F
fengjiayi 已提交
601

602
    The `dirname` is used to specify the folder where to load variables.
603
    If variables were saved in separate files in the folder `dirname`,
604
    set `filename` None. If all variables were saved in a single file,
F
fengjiayi 已提交
605
    use `filename` to specify it.
606

F
fengjiayi 已提交
607 608
    Args:
        executor(Executor): The executor to run for loading variables.
609 610
        dirname(str): The folder where to load the variables.
        main_program(Program, optional): The program whose variables will be loaded.
611
                                    If it is None, the default main program will
F
fengjiayi 已提交
612 613
                                    be used automatically.
                                    Default: None
614
        vars(list[Variable], optional): The list that contains all variables to be loaded.
F
fengjiayi 已提交
615
                                   Default: None
616 617 618 619 620 621
        predicate(function, optional): The function selects variables that make 
                                        `predicate(variable) == True`.
                                        Default: None
        filename(str, optional): The file which saved all required variables. If variables
                                were saved in separate files, set it to be None.
                                Default: None
F
fengjiayi 已提交
622 623 624 625 626 627 628 629 630 631

    Returns:
        None

    Raises:
        TypeError: If `main_program` is not an instance of Program nor None.

    Examples:
        .. code-block:: python

632
            import paddle.fluid as fluid
633

634 635 636 637 638 639 640 641 642 643 644
            main_prog = fluid.Program()
            startup_prog = fluid.Program()
            with fluid.program_guard(main_prog, startup_prog):
                data = fluid.layers.data(name="img", shape=[64, 784], append_batch_size=False)
                w = fluid.layers.create_parameter(shape=[784, 200], dtype='float32', name='fc_w')
                b = fluid.layers.create_parameter(shape=[200], dtype='float32', name='fc_b')
                hidden_w = fluid.layers.matmul(x=data, y=w)
                hidden_b = fluid.layers.elementwise_add(hidden_w, b)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(startup_prog)
F
fengjiayi 已提交
645

646 647 648 649 650 651 652 653 654 655 656
            # The first usage: using `vars` to specify the variables.
            path = "./my_paddle_vars"
            var_list = [w, b]
            fluid.io.save_vars(executor=exe, dirname=path, vars=var_list,
                               filename="vars_file")
            fluid.io.load_vars(executor=exe, dirname=path, vars=var_list,
                               filename="vars_file")
            # w and b will be loaded, and they are supposed to
            # be saved in the same file named 'var_file' in the path "./my_paddle_vars".

            # The second usage: using the `predicate` function to select variables
657
            param_path = "./my_paddle_model"
F
fengjiayi 已提交
658 659 660
            def name_has_fc(var):
                res = "fc" in var.name
                return res
661 662 663
            fluid.io.save_vars(executor=exe, dirname=param_path, main_program=main_prog,
                              vars=None, predicate=name_has_fc)
            fluid.io.load_vars(executor=exe, dirname=param_path, main_program=main_prog,
C
chengduo 已提交
664
                               vars=None, predicate=name_has_fc)
665 666
            # Load All variables in the `main_program` whose name includes "fc".
            # And all the variables are supposed to be saved in separate files.
F
fengjiayi 已提交
667

668
    """
L
lujun 已提交
669
    load_dirname = os.path.normpath(dirname)
T
tangwei12 已提交
670

671
    if vars is None:
672
        if main_program is None:
Y
Yu Yang 已提交
673
            main_program = default_main_program()
674
        if not isinstance(main_program, Program):
675 676 677 678
            raise TypeError("program's type should be Program")

        load_vars(
            executor,
L
lujun 已提交
679
            dirname=load_dirname,
T
tangwei12 已提交
680
            main_program=main_program,
681
            vars=list(filter(predicate, main_program.list_vars())),
682
            filename=filename)
683 684 685
    else:
        load_prog = Program()
        load_block = load_prog.global_block()
686

687 688
        if main_program is None:
            main_program = default_main_program()
T
tangwei12 已提交
689

690 691 692
        if not isinstance(main_program, Program):
            raise TypeError("program should be as Program type or None")

H
hong 已提交
693 694
        #save origin param shape
        orig_para_shape = {}
695
        load_var_map = {}
696 697
        for each_var in vars:
            assert isinstance(each_var, Variable)
T
tangwei12 已提交
698 699
            if each_var.type == core.VarDesc.VarType.RAW:
                continue
H
hong 已提交
700 701

            if isinstance(each_var, Parameter):
702 703
                orig_para_shape[each_var.name] = tuple(each_var.desc.get_shape(
                ))
704
            new_var = _clone_var_in_block_(load_block, each_var)
705
            if filename is None:
706 707 708 709
                load_block.append_op(
                    type='load',
                    inputs={},
                    outputs={'Out': [new_var]},
L
lujun 已提交
710 711 712
                    attrs={
                        'file_path': os.path.join(load_dirname, new_var.name)
                    })
713 714 715
            else:
                load_var_map[new_var.name] = new_var

716
        if filename is not None:
717 718 719 720
            load_var_list = []
            for name in sorted(load_var_map.keys()):
                load_var_list.append(load_var_map[name])

721
            load_block.append_op(
722
                type='load_combine',
723
                inputs={},
724
                outputs={"Out": load_var_list},
L
lujun 已提交
725
                attrs={'file_path': os.path.join(load_dirname, filename)})
726 727
        executor.run(load_prog)

H
hong 已提交
728 729 730 731 732 733 734 735 736 737 738 739 740 741 742
        #check var shape
        for each_var in vars:
            if not isinstance(each_var, Parameter):
                continue
            var_temp = paddle.fluid.global_scope().find_var(each_var.name)
            assert var_temp != None, "can't not find var: " + each_var.name
            new_shape = (np.array(var_temp.get_tensor())).shape
            assert each_var.name in orig_para_shape, earch_var.name + "MUST in var list"
            orig_shape = orig_para_shape.get(each_var.name)
            if new_shape != orig_shape:
                raise RuntimeError(
                    "Shape not matching: the Program requires a parameter with a shape of ({}), "
                    "while the loaded parameter (namely [ {} ]) has a shape of  ({}).".
                    format(orig_shape, each_var.name, new_shape))

743

744
def load_params(executor, dirname, main_program=None, filename=None):
745
    """
746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764
    This API filters out all parameters from the give ``main_program``
    and then tries to load these parameters from the directory ``dirname`` or
    the file ``filename``.

    Use the ``dirname`` to specify the directory where parameters were saved. If
    parameters were saved in separate files under the directory `dirname`, set
    ``filename`` as None; if all parameters were saved in a single file, use
    ``filename`` to specify the file name.

    **Note**:
        Some variables are not Parameter while they are necessary for
        training, such as learning rate, global step, etc. So you cannot save and
        continue your training just by using :ref:`api_fluid_io_save_params` and
        :ref:`api_fluid_io_load_params`. Please use :ref:`api_fluid_io_save_persistables`
        and :ref:`api_fluid_io_load_persistables` instead.

        If you want to load the pre-trained model structure and parameters
        for the inference, please use the :ref:`api_fluid_io_load_inference_model` API. You can
        refer to :ref:`api_guide_model_save_reader_en` for more details.
F
fengjiayi 已提交
765 766

    Args:
767 768
        executor(Executor): The executor used for loading parameters.
                            See :ref:`api_guide_executor_en` for more details about it.
F
fengjiayi 已提交
769
        dirname(str): The directory path.
770 771 772 773 774 775 776 777
        main_program(Program, optional): The program whose parameters will be
                                    loaded. If it is None, the ``default_main_program``
                                    will be used automatically. See :ref:`api_guide_Program_en`
                                    for more about ``Program``.
                                    Default: None.
        filename(str, optional): The file which saved all parameters. If parameters
                            were saved in separated files, set it to None.
                            Default: None.
F
fengjiayi 已提交
778 779 780 781 782 783 784

    Returns:
        None

    Examples:
        .. code-block:: python

785
            import paddle.fluid as fluid
786

F
fengjiayi 已提交
787 788 789
            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
790
            fluid.io.load_params(executor=exe, dirname=param_path,
F
fengjiayi 已提交
791
                                main_program=None)
792 793
    """
    load_vars(
794 795 796
        executor,
        dirname=dirname,
        main_program=main_program,
797
        predicate=is_parameter,
798
        filename=filename)
799 800


801
def load_persistables(executor, dirname, main_program=None, filename=None):
802
    """
803 804 805
    This API filters out all variables with ``persistable==True`` from the
    given ``main_program`` and then tries to load these variables from the
    directory ``dirnameme`` or the file ``filename``.
F
fengjiayi 已提交
806

807 808 809 810
    Use the ``dirname`` to specify the directory where persistable variables
    (refer to :ref:`api_guide_model_save_reader_en`) were saved. If variables
    were saved in separate files, set ``filename`` as None; if all variables
    were saved in a single file, use ``filename`` to specify the file name.
F
fengjiayi 已提交
811 812

    Args:
813 814
        executor(Executor): The executor used for loading persistable variables.
                            See :ref:`api_guide_executor_en` for more details about it.
F
fengjiayi 已提交
815
        dirname(str): The directory path.
816 817 818 819 820 821 822 823
        main_program(Program, optional): The program whose persistbale variables will
                                    be loaded. If it is None, the ``default_main_program``
                                    will be used automatically. See :ref:`api_guide_Program_en`
                                    for more about ``Program``.
                                    Default: None.
        filename(str, optional): The file which saved all persistable variables. If variables
                                 were saved in separated files, set it to None.
                                 Default: None.
F
fengjiayi 已提交
824 825 826 827 828 829 830

    Returns:
        None

    Examples:
        .. code-block:: python

831
            import paddle.fluid as fluid
832

F
fengjiayi 已提交
833 834 835
            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
836
            fluid.io.load_persistables(executor=exe, dirname=param_path,
F
fengjiayi 已提交
837
                                       main_program=None)
838
    """
839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869

    if main_program and main_program._is_distributed:
        _load_distributed_persistables(
            executor, dirname=dirname, main_program=main_program)
    else:
        load_vars(
            executor,
            dirname=dirname,
            main_program=main_program,
            predicate=is_persistable,
            filename=filename)


def _load_distributed_persistables(executor, dirname, main_program=None):
    """
    customized load_persistables for distributed training.
    it should be used on parameter server,

    Args:
        executor(Executor): The executor to run for saving parameters.
        dirname(str): The load directory path.
        main_program(Program): The program whose parameters will be
                            loaded. the main_program must be the pserver_program
                            get after transpiler.

    Returns:
        None

    Examples:
        .. code-block:: python

870
            import paddle.fluid as fluid
871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917
            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            t = distribute_transpiler.DistributeTranspiler()
            t.transpile(...)
            pserver_prog = t.get_pserver_program(...)
            _load_distributed_persistables(executor=exe, dirname=param_path, main_program=pserver_prog)
    """

    def __is_distributed_part_var(varname):
        trainer_idx = varname.find(".trainer_")
        block_idx = varname.find(".block")
        return trainer_idx or block_idx

    def __load_persistable_vars(executor, dirname, need_load_vars):
        load_prog = Program()
        load_block = load_prog.global_block()
        need_delete_vars = []

        for param in need_load_vars:
            origin_var = param.origin
            slice_var = param.slice
            is_slice = param.is_slice
            offset = param.offset

            if is_slice:
                origin = load_block.create_var(
                    name="{}.load".format(origin_var.name),
                    type=origin_var.type,
                    shape=origin_var.shape,
                    dtype=origin_var.dtype,
                    persistable=True)

                load_block.append_op(
                    type='load',
                    inputs={},
                    outputs={'Out': [origin]},
                    attrs={
                        'file_path': os.path.join(dirname, origin_var.name)
                    })

                slice = load_block.create_var(
                    name=slice_var.name,
                    type=slice_var.type,
                    shape=slice_var.shape,
                    dtype=slice_var.dtype,
                    persistable=True)

T
tangwei12 已提交
918 919 920 921
                dim1_flatten = 1
                if len(slice.shape) >= 2:
                    dim1_flatten = reduce(lambda x, y: x * y, slice.shape[1:])

922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955
                start = int(offset / dim1_flatten)
                end = int(offset / dim1_flatten + slice.shape[0])

                load_block.append_op(
                    type="slice",
                    inputs={'Input': origin},
                    outputs={'Out': slice},
                    attrs={'axes': [0],
                           'starts': [start],
                           'ends': [end]})

                need_delete_vars.append(origin)
            else:
                origin = load_block.create_var(
                    name="{}".format(origin_var.name),
                    type=origin_var.type,
                    shape=origin_var.shape,
                    dtype=origin_var.dtype,
                    persistable=True)
                load_block.append_op(
                    type='load',
                    inputs={},
                    outputs={'Out': [origin]},
                    attrs={
                        'file_path': os.path.join(dirname, origin_var.name)
                    })

        load_block.append_op(
            type='delete_var',
            inputs={'X': need_delete_vars}, )

        executor.run(load_prog)

    if not isinstance(main_program, Program):
T
tangwei12 已提交
956
        raise TypeError("'main_program' should be an instance of Program.")
957 958 959 960 961 962 963 964 965 966 967 968 969 970

    if not main_program._is_distributed:
        raise ValueError(
            "'_load_distributed_persistables' just be designed for distributed training."
        )

    if not main_program._ps_endpoint:
        raise ValueError(
            "'_load_distributed_persistables' need current_endpoint set in DistributeTranspiler.transpile"
        )

    need_load_vars = main_program._parameters_on_pservers.get_distributed_vars_by_ep(
        main_program._ps_endpoint)
    __load_persistable_vars(executor, dirname, need_load_vars)
971 972


973 974 975
def prepend_feed_ops(inference_program,
                     feed_target_names,
                     feed_holder_name='feed'):
Q
Qiao Longfei 已提交
976 977 978
    if len(feed_target_names) == 0:
        return

K
Kexin Zhao 已提交
979 980
    global_block = inference_program.global_block()
    feed_var = global_block.create_var(
981 982 983
        name=feed_holder_name,
        type=core.VarDesc.VarType.FEED_MINIBATCH,
        persistable=True)
K
Kexin Zhao 已提交
984

985
    for i, name in enumerate(feed_target_names):
K
fix bug  
Kexin Zhao 已提交
986
        out = global_block.var(name)
W
Wu Yi 已提交
987
        global_block._prepend_op(
K
Kexin Zhao 已提交
988 989
            type='feed',
            inputs={'X': [feed_var]},
K
fix bug  
Kexin Zhao 已提交
990
            outputs={'Out': [out]},
K
Kexin Zhao 已提交
991 992 993
            attrs={'col': i})


994 995 996
def append_fetch_ops(inference_program,
                     fetch_target_names,
                     fetch_holder_name='fetch'):
K
Kexin Zhao 已提交
997 998
    global_block = inference_program.global_block()
    fetch_var = global_block.create_var(
999 1000 1001
        name=fetch_holder_name,
        type=core.VarDesc.VarType.FETCH_LIST,
        persistable=True)
K
Kexin Zhao 已提交
1002

1003
    for i, name in enumerate(fetch_target_names):
K
Kexin Zhao 已提交
1004 1005 1006 1007 1008 1009 1010
        global_block.append_op(
            type='fetch',
            inputs={'X': [name]},
            outputs={'Out': [fetch_var]},
            attrs={'col': i})


1011 1012 1013 1014
def save_inference_model(dirname,
                         feeded_var_names,
                         target_vars,
                         executor,
1015
                         main_program=None,
1016
                         model_filename=None,
1017
                         params_filename=None,
T
tangwei12 已提交
1018 1019
                         export_for_deployment=True,
                         program_only=False):
1020
    """
F
fengjiayi 已提交
1021
    Prune the given `main_program` to build a new program especially for inference,
1022
    and then save it and all related parameters to given `dirname` .
1023
    If you just want to save parameters of your trained model, please use the
1024 1025
    :ref:`api_fluid_io_save_params` . You can refer to :ref:`api_guide_model_save_reader_en`
    for more details.
1026

1027 1028 1029 1030 1031
    Note:
        The :code:`dirname` is used to specify the folder where inference model 
        structure and parameters are going to be saved. If you would like to save params of
        Program in separate files, set `params_filename` None; if you would like to save all 
        params of Program in a single file, use `params_filename` to specify the file name.
F
fengjiayi 已提交
1032 1033 1034

    Args:
        dirname(str): The directory path to save the inference model.
1035 1036 1037 1038 1039 1040 1041 1042 1043 1044 1045 1046 1047 1048 1049 1050
        feeded_var_names(list[str]): list of string. Names of variables that need to be feeded
                                     data during inference.
        target_vars(list[Variable]): list of Variable. Variables from which we can get 
                                     inference results.
        executor(Executor): The executor that saves the inference model. You can refer 
                            to :ref:`api_guide_executor_en` for more details.
        main_program(Program, optional): The original program, which will be pruned to
                                         build the inference model. If is setted None,
                                         the global default :code:`_main_program_` will be used.
                                         Default: None.
        model_filename(str, optional): The name of file to save the inference program
                                       itself. If is setted None, a default filename
                                       :code:`__model__` will be used.
        params_filename(str, optional): The name of file to save all related parameters.
                                        If it is setted None, parameters will be saved
                                        in separate files .
X
Xin Pan 已提交
1051 1052 1053 1054 1055
        export_for_deployment(bool): If True, programs are modified to only support
                                     direct inference deployment. Otherwise,
                                     more information will be stored for flexible
                                     optimization and re-training. Currently, only
                                     True is supported.
1056 1057 1058 1059
                                     Default: True.
        program_only(bool, optional): If True, It will save inference program only, and do not 
                                      save params of Program.
                                      Default: False.
1060

F
fengjiayi 已提交
1061
    Returns:
1062 1063 1064 1065
        The fetch variables' name list

     Return Type:
        list
F
fengjiayi 已提交
1066 1067

    Raises:
1068 1069
        ValueError: If `feed_var_names` is not a list of basestring, an exception is thrown.
        ValueError: If `target_vars` is not a list of Variable, an exception is thrown.
F
fengjiayi 已提交
1070 1071 1072

    Examples:
        .. code-block:: python
F
fengjiayi 已提交
1073

1074 1075
            import paddle.fluid as fluid

F
fengjiayi 已提交
1076 1077
            path = "./infer_model"

1078
            # User defined network, here a softmax regresssion example
1079 1080
            image = fluid.data(name='img', shape=[None, 28, 28], dtype='float32')
            label = fluid.data(name='label', shape=[None, 1], dtype='int64')
1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097
            feeder = fluid.DataFeeder(feed_list=[image, label], place=fluid.CPUPlace())
            predict = fluid.layers.fc(input=image, size=10, act='softmax')

            loss = fluid.layers.cross_entropy(input=predict, label=label)
            avg_loss = fluid.layers.mean(loss)

            exe = fluid.Executor(fluid.CPUPlace())
            exe.run(fluid.default_startup_program())

            # Feed data and train process

            # Save inference model. Note we don't save label and loss in this example
            fluid.io.save_inference_model(dirname=path,
                                          feeded_var_names=['img'],
                                          target_vars=[predict],
                                          executor=exe)

1098 1099 1100
            # In this example, the save_inference_mode inference will prune the default
            # main program according to the network's input node (img) and output node(predict). 
            # The pruned inference program is going to be saved in the "./infer_model/__model__"
F
fengjiayi 已提交
1101
            # and parameters are going to be saved in separate files under folder
1102
            # "./infer_model".
1103 1104

    """
M
minqiyang 已提交
1105
    if isinstance(feeded_var_names, six.string_types):
F
fengjiayi 已提交
1106
        feeded_var_names = [feeded_var_names]
X
Xin Pan 已提交
1107
    elif export_for_deployment:
Q
Qiao Longfei 已提交
1108
        if len(feeded_var_names) > 0:
1109
            # TODO(paddle-dev): polish these code blocks
Q
Qiao Longfei 已提交
1110
            if not (bool(feeded_var_names) and all(
M
minqiyang 已提交
1111
                    isinstance(name, six.string_types)
1112
                    for name in feeded_var_names)):
M
minqiyang 已提交
1113
                raise ValueError("'feed_var_names' should be a list of str.")
F
fengjiayi 已提交
1114 1115

    if isinstance(target_vars, Variable):
F
fengjiayi 已提交
1116
        target_vars = [target_vars]
X
Xin Pan 已提交
1117
    elif export_for_deployment:
1118 1119
        if not (bool(target_vars) and
                all(isinstance(var, Variable) for var in target_vars)):
F
fengjiayi 已提交
1120 1121
            raise ValueError("'target_vars' should be a list of Variable.")

C
chengduo 已提交
1122
    main_program = _get_valid_program(main_program)
T
tangwei12 已提交
1123

1124 1125 1126 1127 1128 1129 1130 1131 1132
    # remind user to set auc_states to zeros if the program contains auc op 
    all_ops = main_program.global_block().ops
    for op in all_ops:
        if op.type == 'auc':
            warnings.warn(
                "please ensure that you have set the auc states to zeros before saving inference model"
            )
            break

1133 1134 1135 1136 1137
    # fix the bug that the activation op's output as target will be pruned.
    # will affect the inference performance.
    # TODO(Superjomn) add an IR pass to remove 1-scale op.
    with program_guard(main_program):
        uniq_target_vars = []
F
flame 已提交
1138
        for i, var in enumerate(target_vars):
1139
            if isinstance(var, Variable):
F
flame 已提交
1140 1141 1142
                var = layers.scale(
                    var, 1., name="save_infer_model/scale_{}".format(i))
            uniq_target_vars.append(var)
1143
        target_vars = uniq_target_vars
F
flame 已提交
1144
    target_var_name_list = [var.name for var in target_vars]
1145

1146
    # when a pserver and a trainer running on the same machine, mkdir may conflict
L
lujun 已提交
1147
    save_dirname = dirname
1148
    try:
L
lujun 已提交
1149 1150
        save_dirname = os.path.normpath(dirname)
        os.makedirs(save_dirname)
1151 1152 1153 1154
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

X
Xin Pan 已提交
1155 1156 1157 1158
    if model_filename is not None:
        model_basename = os.path.basename(model_filename)
    else:
        model_basename = "__model__"
L
lujun 已提交
1159
    model_basename = os.path.join(save_dirname, model_basename)
1160

X
Xin Pan 已提交
1161 1162 1163 1164
    # When export_for_deployment is true, we modify the program online so that
    # it can only be loaded for inference directly. If it's false, the whole
    # original program and related meta are saved so that future usage can be
    # more flexible.
1165 1166 1167

    origin_program = main_program.clone()

X
Xin Pan 已提交
1168
    if export_for_deployment:
X
Xin Pan 已提交
1169 1170
        main_program = main_program.clone()
        global_block = main_program.global_block()
1171
        need_to_remove_op_index = []
X
Xin Pan 已提交
1172 1173 1174
        for i, op in enumerate(global_block.ops):
            op.desc.set_is_target(False)
            if op.type == "feed" or op.type == "fetch":
1175 1176 1177 1178 1179
                need_to_remove_op_index.append(i)

        for index in need_to_remove_op_index[::-1]:
            global_block._remove_op(index)

X
Xin Pan 已提交
1180
        main_program.desc.flush()
X
Xin Pan 已提交
1181

1182 1183
        main_program = main_program._prune_with_input(
            feeded_var_names=feeded_var_names, targets=target_vars)
X
Xin Pan 已提交
1184
        main_program = main_program._inference_optimize(prune_read_op=True)
X
Xin Pan 已提交
1185 1186
        fetch_var_names = [v.name for v in target_vars]

X
Xin Pan 已提交
1187 1188 1189
        prepend_feed_ops(main_program, feeded_var_names)
        append_fetch_ops(main_program, fetch_var_names)

1190 1191
        main_program.desc._set_version()
        paddle.fluid.core.save_op_compatible_info(main_program.desc)
X
Xin Pan 已提交
1192 1193
        with open(model_basename, "wb") as f:
            f.write(main_program.desc.serialize_to_string())
X
Xin Pan 已提交
1194 1195 1196
    else:
        # TODO(panyx0718): Save more information so that it can also be used
        # for training and more flexible post-processing.
X
Xin Pan 已提交
1197 1198
        with open(model_basename + ".main_program", "wb") as f:
            f.write(main_program.desc.serialize_to_string())
T
tangwei12 已提交
1199

T
tangwei12 已提交
1200 1201 1202 1203 1204 1205
    if program_only:
        warnings.warn(
            "save_inference_model specified the param `program_only` to True, It will not save params of Program."
        )
        return target_var_name_list

1206 1207
    main_program._copy_dist_param_info_from(origin_program)

X
fix  
Xin Pan 已提交
1208 1209
    if params_filename is not None:
        params_filename = os.path.basename(params_filename)
1210

L
lujun 已提交
1211
    save_persistables(executor, save_dirname, main_program, params_filename)
F
flame 已提交
1212
    return target_var_name_list
X
fix  
Xin Pan 已提交
1213

1214

1215 1216 1217
def load_inference_model(dirname,
                         executor,
                         model_filename=None,
T
tangwei12 已提交
1218 1219
                         params_filename=None,
                         pserver_endpoints=None):
1220
    """
1221 1222 1223
    Load the inference model from a given directory. By this API, you can get the model
    structure(Inference Program) and model parameters. If you just want to load
    parameters of the pre-trained model, please use the :ref:`api_fluid_io_load_params` API.
1224
    You can refer to :ref:`api_guide_model_save_reader_en` for more details.
1225

F
fengjiayi 已提交
1226
    Args:
1227
        dirname(str): The given directory path.
F
fengjiayi 已提交
1228
        executor(Executor): The executor to run for loading inference model.
1229 1230
                            See :ref:`api_guide_executor_en` for more details about it.
        model_filename(str, optional): The name of file to load the inference program.
1231
                                  If it is None, the default filename
1232 1233 1234
                                  ``__model__`` will be used.
                                  Default: ``None``.
        params_filename(str, optional): The name of file to load all parameters.
1235 1236 1237
                                   It is only used for the case that all
                                   parameters were saved in a single binary
                                   file. If parameters were saved in separate
1238 1239 1240 1241 1242 1243
                                   files, set it as ``None``.
                                   Default: ``None``.

        pserver_endpoints(list, optional): It is only needed by the distributed inference.
                                    If using a distributed look up table during the training,
                                    this table is also needed by the inference process. Its value is
1244
                                    a list of pserver endpoints.
F
fengjiayi 已提交
1245 1246

    Returns:
1247
        list: The return of this API is a list with three elements:
1248
        (program, feed_target_names, fetch_targets). The `program` is a
1249 1250 1251 1252 1253
        ``Program`` (refer to :ref:`api_guide_Program_en`), which is used for inference.
        The `feed_target_names` is a list of ``str``, which contains names of variables
        that need to feed data in the inference program. The `fetch_targets` is a list of
        ``Variable`` (refer to :ref:`api_guide_Program_en`). It contains variables from which
        we can get inference results.
F
fengjiayi 已提交
1254 1255 1256 1257 1258 1259 1260

    Raises:
        ValueError: If `dirname` is not a existing directory.

    Examples:
        .. code-block:: python

1261 1262
            import paddle.fluid as fluid
            import numpy as np
1263 1264

            # Build the model
1265 1266 1267 1268 1269 1270 1271 1272 1273 1274 1275
            main_prog = fluid.Program()
            startup_prog = fluid.Program()
            with fluid.program_guard(main_prog, startup_prog):
                data = fluid.layers.data(name="img", shape=[64, 784], append_batch_size=False)
                w = fluid.layers.create_parameter(shape=[784, 200], dtype='float32')
                b = fluid.layers.create_parameter(shape=[200], dtype='float32')
                hidden_w = fluid.layers.matmul(x=data, y=w)
                hidden_b = fluid.layers.elementwise_add(hidden_w, b)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(startup_prog)
1276 1277

            # Save the inference model
F
fengjiayi 已提交
1278
            path = "./infer_model"
1279 1280
            fluid.io.save_inference_model(dirname=path, feeded_var_names=['img'],
                         target_vars=[hidden_b], executor=exe, main_program=main_prog)
1281 1282 1283

            # Demo one. Not need to set the distributed look up table, because the
            # training doesn't use a distributed look up table.
1284 1285
            [inference_program, feed_target_names, fetch_targets] = (
                fluid.io.load_inference_model(dirname=path, executor=exe))
1286
            tensor_img = np.array(np.random.random((1, 64, 784)), dtype=np.float32)
F
fengjiayi 已提交
1287 1288 1289 1290
            results = exe.run(inference_program,
                          feed={feed_target_names[0]: tensor_img},
                          fetch_list=fetch_targets)

1291 1292 1293
            # Demo two. If the training uses a distributed look up table, the pserver
            # endpoints list should be supported when loading the inference model.
            # The below is just an example.
1294
            endpoints = ["127.0.0.1:2023","127.0.0.1:2024"]
1295
            [dist_inference_program, dist_feed_target_names, dist_fetch_targets] = (
1296 1297
                fluid.io.load_inference_model(dirname=path,
                                              executor=exe,
1298
                                              pserver_endpoints=endpoints))
1299

1300
            # In this example, the inference program was saved in the file
1301
            # "./infer_model/__model__" and parameters were saved in
1302 1303 1304 1305
            # separate files under the directory "./infer_model".
            # By the inference program, feed_target_names and
            # fetch_targets, we can use an executor to run the inference
            # program for getting the inference result.
1306
    """
L
lujun 已提交
1307 1308
    load_dirname = os.path.normpath(dirname)
    if not os.path.isdir(load_dirname):
1309 1310
        raise ValueError("There is no directory named '%s'", dirname)

1311 1312
    if model_filename is not None:
        model_filename = os.path.basename(model_filename)
1313
    else:
1314
        model_filename = "__model__"
L
lujun 已提交
1315
    model_filename = os.path.join(load_dirname, model_filename)
1316 1317 1318

    if params_filename is not None:
        params_filename = os.path.basename(params_filename)
1319

1320
    with open(model_filename, "rb") as f:
1321 1322
        program_desc_str = f.read()

1323
    program = Program.parse_from_string(program_desc_str)
X
Xin Pan 已提交
1324
    if not core._is_program_version_supported(program._version()):
X
version  
Xin Pan 已提交
1325 1326 1327
        raise ValueError("Unsupported program version: %d\n" %
                         program._version())
    # Binary data also need versioning.
L
lujun 已提交
1328
    load_persistables(executor, load_dirname, program, params_filename)
1329

T
tangwei12 已提交
1330
    if pserver_endpoints:
T
tangwei12 已提交
1331
        program = _endpoints_replacement(program, pserver_endpoints)
T
tangwei12 已提交
1332

1333 1334
    feed_target_names = program.desc.get_feed_target_names()
    fetch_target_names = program.desc.get_fetch_target_names()
1335 1336 1337 1338 1339
    fetch_targets = [
        program.global_block().var(name) for name in fetch_target_names
    ]

    return [program, feed_target_names, fetch_targets]
X
xuwei06 已提交
1340 1341


T
tangwei12 已提交
1342 1343 1344
def _endpoints_replacement(program, endpoints):
    ENDPOINT_MAP = "epmap"
    for op in program.global_block().ops:
T
tangwei12 已提交
1345 1346
        if op.has_attr(ENDPOINT_MAP):
            op.set_attr(ENDPOINT_MAP, endpoints)
T
fix  
tangwei12 已提交
1347
    program._sync_with_cpp()
T
tangwei12 已提交
1348
    return program
T
tangwei12 已提交
1349 1350


X
xuwei06 已提交
1351 1352
def get_parameter_value(para, executor):
    """
F
fengjiayi 已提交
1353 1354 1355 1356 1357 1358 1359 1360 1361 1362 1363
    Get the LoDTensor value of the given parameter.

    Args:
        para(Parameter): The parameter to get value from.
        executor(Executor): The executor to run for retrieving the value.

    Returns:
        numpy.array: The given parameter's values.

    Raises:
        AssertionError: If the `para` is not an instance of Parameter.
X
xuwei06 已提交
1364

F
fengjiayi 已提交
1365 1366
    Examples:
        .. code-block:: python
X
xuwei06 已提交
1367

1368
            import paddle.fluid as fluid
F
fengjiayi 已提交
1369 1370 1371
            exe = fluid.Executor(fluid.CPUPlace())
            param = fluid.default_main_program().global_block().var('fc.w')
            p = fluid.io.get_parameter_value(param, exe)
1372

X
xuwei06 已提交
1373
    """
X
xuwei06 已提交
1374 1375
    assert is_parameter(para)

X
xuwei06 已提交
1376 1377 1378 1379 1380 1381 1382 1383
    get_program = Program()
    block = get_program.global_block()
    new_var = _clone_var_in_block_(block, para)
    return executor.run(get_program, feed={}, fetch_list=[new_var])[0]


def get_parameter_value_by_name(name, executor, program=None):
    """
F
fengjiayi 已提交
1384
    Get the LoDTensor value of a certain parameter by its name.
X
xuwei06 已提交
1385

F
fengjiayi 已提交
1386 1387 1388 1389 1390 1391 1392
    Args:
        name(str): The parameter's name.
        executor(Executor): The executor to run for retrieving the value.
        program(Program | None): The program where to find the parameter.
                               If it's set to be None, the function will
                               try to find the parameter in the default
                               main program.
X
xuwei06 已提交
1393

F
fengjiayi 已提交
1394 1395
    Returns:
        numpy.array: The parameter's values.
1396

F
fengjiayi 已提交
1397 1398 1399 1400 1401
    Raises:
        TypeError: If given `name` is not an instance of basestring.
        TypeError: If the parameter with the given name doesn't exist.
        AssertionError: If there is a varibale named `name` in the
                        given program but it is not a Parameter.
1402

F
fengjiayi 已提交
1403 1404 1405
    Examples:
        .. code-block:: python

1406
            import paddle.fluid as fluid
F
fengjiayi 已提交
1407 1408
            exe = fluid.Executor(fluid.CPUPlace())
            p = fluid.io.get_parameter_value('fc.w', exe)
X
xuwei06 已提交
1409 1410
    """
    if program is None:
Y
Yu Yang 已提交
1411
        program = default_main_program()
X
xuwei06 已提交
1412 1413
    var = program.global_block().var(name)
    return get_parameter_value(var, executor)
1414 1415 1416 1417 1418 1419 1420 1421 1422 1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465 1466 1467 1468 1469 1470 1471 1472 1473 1474 1475 1476 1477 1478 1479 1480 1481 1482 1483 1484 1485 1486 1487 1488 1489 1490


def _save_persistable_nodes(executor, dirname, graph):
    """
    Save persistable nodes to the given directory by the executor.

    Args:
        executor(Executor): The executor to run for saving node values.
        dirname(str): The directory path.
        graph(IrGraph): All the required persistable nodes in the graph will be saved.
    """
    persistable_node_names = set()
    persistable_nodes = []
    all_persistable_nodes = graph.all_persistable_nodes()
    for node in all_persistable_nodes:
        name = cpt.to_text(node.name())
        if name not in persistable_node_names:
            persistable_node_names.add(name)
            persistable_nodes.append(node)
    program = Program()
    var_list = []
    for node in persistable_nodes:
        var_desc = node.var()
        if var_desc.type() == core.VarDesc.VarType.RAW or \
                var_desc.type() == core.VarDesc.VarType.READER:
            continue
        var = program.global_block().create_var(
            name=var_desc.name(),
            shape=var_desc.shape(),
            dtype=var_desc.dtype(),
            type=var_desc.type(),
            lod_level=var_desc.lod_level(),
            persistable=var_desc.persistable())
        var_list.append(var)
    save_vars(executor=executor, dirname=dirname, vars=var_list)


def _load_persistable_nodes(executor, dirname, graph):
    """
    Load persistable node values from the given directory by the executor.

    Args:
        executor(Executor): The executor to run for loading node values.
        dirname(str): The directory path.
        graph(IrGraph): All the required persistable nodes in the graph will be loaded.
    """
    persistable_node_names = set()
    persistable_nodes = []
    all_persistable_nodes = graph.all_persistable_nodes()
    for node in all_persistable_nodes:
        name = cpt.to_text(node.name())
        if name not in persistable_node_names:
            persistable_node_names.add(name)
            persistable_nodes.append(node)
    program = Program()
    var_list = []

    def _exist(var):
        return os.path.exists(os.path.join(dirname, var.name))

    for node in persistable_nodes:
        var_desc = node.var()
        if var_desc.type() == core.VarDesc.VarType.RAW or \
                var_desc.type() == core.VarDesc.VarType.READER:
            continue
        var = program.global_block().create_var(
            name=var_desc.name(),
            shape=var_desc.shape(),
            dtype=var_desc.dtype(),
            type=var_desc.type(),
            lod_level=var_desc.lod_level(),
            persistable=var_desc.persistable())
        if _exist(var):
            var_list.append(var)
        else:
            _logger.warn("Cannot find the var %s!!!" % (node.name()))
    load_vars(executor=executor, dirname=dirname, vars=var_list)
H
hong 已提交
1491 1492 1493 1494 1495 1496 1497 1498 1499 1500 1501 1502 1503 1504 1505 1506 1507 1508 1509 1510 1511 1512 1513 1514 1515 1516 1517 1518 1519 1520 1521


def save(program, model_path):
    """
    This function save parameters, optimizer information and network description to  model_path.

    The parameters contains all the trainable Variable, will save to a file with suffix ".pdparams".
    The optimizer information contains all the variable used by optimizer. For Adam optimizer, contains beta1, beta2, momentum etc. All the information will save to a file with suffix ".pdopt". (If the optimizer have no variable need to save (like SGD), the fill will not generated).
    The network description is the description of the program. It's only used for deployment. The description  will save to a file with a suffix ".pdmodel".
    
    Args:
        program(Program) : The program to saved.
        model_path(str): the file prefix to save the program. The format is "dirname/file_prefix". If file_prefix is empty str. A exception will be raised

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            prog = fluid.default_main_program()
            fluid.save( prog, "./temp")

    """

    base_name = os.path.basename(model_path)
    assert base_name != "", \
            "model_path MUST be format of dirname/filename [dirname\\filename in Window], Now filename is empty str"

H
hong 已提交
1522 1523 1524 1525
    def get_tensor(var):
        t = global_scope().find_var(var.name).get_tensor()
        return np.array(t)

H
hong 已提交
1526
    parameter_list = list(filter(is_parameter, program.list_vars()))
H
hong 已提交
1527 1528 1529
    param_dict = {p.name: get_tensor(p) for p in parameter_list}
    with open(model_path + ".pdparams", 'wb') as f:
        pickle.dump(param_dict, f)
H
hong 已提交
1530 1531 1532 1533

    optimizer_var_list = list(
        filter(is_belong_to_optimizer, program.list_vars()))

H
hong 已提交
1534 1535 1536
    opt_dict = {p.name: get_tensor(p) for p in optimizer_var_list}
    with open(model_path + ".pdopt", 'wb') as f:
        pickle.dump(opt_dict, f)
H
hong 已提交
1537 1538 1539 1540 1541 1542 1543 1544 1545 1546

    main_program = program.clone()
    program.desc.flush()
    main_program.desc._set_version()
    paddle.fluid.core.save_op_compatible_info(program.desc)

    with open(model_path + ".pdmodel", "wb") as f:
        f.write(program.desc.serialize_to_string())


H
hong 已提交
1547
def load(program, model_path, executor=None):
H
hong 已提交
1548 1549
    """
    This function filter out parameters and optimizer information from program, and then get corresponding value from file.
H
hong 已提交
1550
    An exception will throw if shape or dtype of the parameters is not match.
H
hong 已提交
1551 1552

    Args: 
H
hong 已提交
1553 1554 1555 1556
        program(Program): The program will be loaded
        model_path(str): The file prefix store the program
        executor(Executor, optional): The executor used for initialize the parameter 
                                      When startup program is not run.
H
hong 已提交
1557 1558 1559 1560 1561 1562 1563 1564 1565 1566 1567 1568 1569 1570 1571 1572

    Returns:
        None
        
     Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            prog = fluid.default_main_program()
            fluid.save( prog, "./temp")

            fluid.load( prog, "./temp")

    """

H
hong 已提交
1573 1574
    assert executor is None or isinstance(executor, Executor)

H
hong 已提交
1575 1576
    parameter_file_name = model_path + ".pdparams"
    assert os.path.exists(parameter_file_name), \
H
hong 已提交
1577 1578 1579 1580 1581 1582 1583 1584 1585 1586 1587 1588 1589 1590 1591
            "Parameter file [{}] not exits".format(parameter_file_name)

    def set_var(var, ndarray):
        t = global_scope().find_var(var.name).get_tensor()
        p = t._place()
        if p.is_cpu_place():
            place = paddle.fluid.CPUPlace()
        elif p.is_cuda_pinned_place():
            place = paddle.fluid.CUDAPinnedPlace()
        else:
            p = paddle.fluid.core.Place()
            p.set_place(t._place())
            place = paddle.fluid.CUDAPlace(p.gpu_device_id())

        t.set(ndarray, place)
H
hong 已提交
1592 1593

    parameter_list = list(filter(is_parameter, program.list_vars()))
H
hong 已提交
1594 1595 1596 1597 1598 1599 1600 1601 1602 1603 1604 1605

    if executor:
        paddle.fluid.core._create_loaded_parameter(parameter_list,
                                                   global_scope(),
                                                   executor._default_executor)
    with open(parameter_file_name, 'rb') as f:
        load_dict = pickle.load(f)
    for v in parameter_list:
        assert v.name in load_dict, \
            "Can not find [{}] in model file [{}]".format(
                v.name, parameter_file_name)
        set_var(v, load_dict[v.name])
H
hong 已提交
1606 1607 1608 1609 1610 1611 1612 1613

    optimizer_var_list = list(
        filter(is_belong_to_optimizer, program.list_vars()))

    if len(optimizer_var_list) > 0:
        opt_file_name = model_path + ".pdopt"
        assert os.path.exists(opt_file_name), \
                "Optimizer file [{}] not exits".format( opt_file_name)
H
hong 已提交
1614 1615 1616 1617 1618 1619 1620 1621 1622 1623 1624 1625 1626 1627 1628 1629 1630 1631 1632 1633 1634 1635 1636 1637 1638 1639 1640 1641 1642 1643 1644 1645 1646 1647 1648 1649 1650 1651 1652 1653 1654 1655 1656 1657 1658 1659 1660 1661 1662 1663 1664 1665 1666 1667 1668 1669 1670 1671 1672 1673 1674 1675 1676 1677 1678 1679 1680 1681 1682 1683 1684 1685 1686 1687 1688 1689 1690 1691 1692 1693 1694 1695 1696 1697 1698 1699 1700 1701 1702 1703 1704 1705 1706 1707 1708 1709 1710 1711 1712 1713 1714 1715 1716 1717 1718 1719 1720 1721 1722 1723 1724 1725 1726 1727 1728 1729 1730 1731 1732 1733 1734 1735 1736 1737 1738 1739 1740 1741 1742 1743 1744 1745 1746 1747 1748

        if executor:
            paddle.fluid.core._create_loaded_parameter(
                optimizer_var_list, global_scope(), executor._default_executor)

        with open(opt_file_name, 'rb') as f:
            load_dict = pickle.load(f)
        for v in optimizer_var_list:
            assert v.name in load_dict, \
                "Can not find [{}] in model file [{}]".format(
                    v.name, opt_file_name)
            set_var(v, load_dict[v.name])


def load_program_state(model_path):
    """
    Load program state from local file
    
    Args:
        model_path(str): The file prefix store the program
    Returns:
        state_dict(dict): the dict store Parameter and optimizer information

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            x = fluid.data( name="x", shape=[10, 10], dtype='float32')
            y = fluid.layers.fc( x, 10)
            z = fluid.layers.fc( y, 10)

            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run( fluid.default_startup_program() )
            prog = fluid.default_main_program()

            fluid.save( prog, "./temp")
            program_state = fluid.load_program_state( "./temp")
            
            fluid.set_program_state( prog, program_state)

    """
    parameter_file_name = model_path + ".pdparams"
    assert os.path.exists(parameter_file_name), \
            "Parameter file [{}] not exits".format( parameter_file_name)

    with open(parameter_file_name, 'rb') as f:
        para_dict = pickle.load(f)

    opt_file_name = model_path + ".pdopt"
    if os.path.exists(opt_file_name):
        with open(opt_file_name, 'rb') as f:
            opti_dict = pickle.load(f)

        para_dict.update(opti_dict)

    return para_dict


def set_program_state(program, state_dict):
    """
    Set program parameter from state_dict

    An exception will throw if shape or dtype of the parameters is not match. 

    NOTICE: This function MUST called after run start_up_program

    Args:
        program(Program): The program to be set
        state_dict(dict): the dict store Parameter and optimizer information
    Returns: 
        None
    
    Examples:
        .. code-block:: python
            
            import paddle.fluid as fluid
            x = fluid.data( name="x", shape=[10, 10], dtype='float32')
            y = fluid.layers.fc( x, 10)
            z = fluid.layers.fc( y, 10)

            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run( fluid.default_startup_program() )
            prog = fluid.default_main_program()

            fluid.save( prog, "./temp")
            program_state = fluid.load_program_state( "./temp")

    """
    parameter_list = list(filter(is_persistable, program.list_vars()))

    used_para_list = {}
    for para in parameter_list:
        var_temp = paddle.fluid.global_scope().find_var(para.name)
        assert var_temp != None, \
                "Variable [ {} ] Not found, Please make sure run startup program".format( para.name )
        if para.name in state_dict:
            # set value from state dict
            orig_para_np = np.array(var_temp.get_tensor())
            new_para_np = state_dict[para.name]
            assert orig_para_np.shape == new_para_np.shape,  \
                    "Shape not matching: the Program requires a parameter with a shape of ({}), " \
                    "while the loaded parameter (namely [ {} ]) has a shape of  ({})." \
                    .format(orig_para_np.shape, para.name, new_para_np.shape)
            assert orig_para_np.dtype == new_para_np.dtype,  \
                    "Dtype not matching: the Program requires a parameter with a dtype of ({}), " \
                    "while the loaded parameter (namely [ {} ]) has a dtype of  ({})." \
                    .format(orig_para_np.dtype, para.name, new_para_np.dtype)

            ten = var_temp.get_tensor()
            ten_place = ten._place()

            assert ten_place.is_gpu_place() or ten_place.is_cpu_place(), \
                    "Place not support, only support CPUPlace and GPUPlace, now is {}".format( str(ten_place))
            py_place = paddle.fluid.CPUPlace()
            if ten_place.is_cuda_pinned_place():
                place = paddle.fluid.CUDAPinnedPlace()
            elif ten_place.is_gpu_place():
                p = paddle.fluid.core.Place()
                p.set_place(ten_place)
                py_place = paddle.fluid.CUDAPlace(p.gpu_device_id())

            ten.set(new_para_np, py_place)

            used_para_list[para.name] = 1

    unused_para_list = []
    for k, v in state_dict.items():
        if k not in used_para_list:
            unused_para_list.append(k)
    if len(unused_para_list) > 0:
        warnings.warn(
            "This list is not set, Because of Paramerter not found in program. There are: {}".
            format(" ".join(unused_para_list)))