#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import os
import errno
import warnings
import six
import logging
from functools import reduce

import numpy as np

import paddle
import paddle.reader
from paddle.reader import *
from paddle.fluid import layers
from paddle.fluid.executor import Executor, global_scope
from paddle.fluid.evaluator import Evaluator
from paddle.fluid.framework import Program, Parameter, default_main_program, default_startup_program, Variable, program_guard
from paddle.fluid.compiler import CompiledProgram
from paddle.fluid.log_helper import get_logger
from . import reader
from .reader import *
from . import core
from .. import compat as cpt

batch = paddle.batch

__all__ = [
    'save_vars', 'save_params', 'save_persistables', 'load_vars', 'load_params',
    'load_persistables', 'save_inference_model', 'load_inference_model',
    'batch', 'save', 'load'
] + reader.__all__ + paddle.reader.__all__

_logger = get_logger(
    __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s')


def is_parameter(var):
    """
    Check whether the given variable is an instance of Parameter.

    Args:
        var(Variable): The variable to be checked.

    Returns:
        bool: True if the given `var` is an instance of Parameter,
        False if not.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            param = fluid.default_main_program().global_block().var('fc.w')
            res = fluid.io.is_parameter(param)
    """
    return isinstance(var, Parameter)


def is_persistable(var):
    """
    Check whether the given variable is persistable.

    Args:
        var(Variable): The variable to be checked.

    Returns:
        bool: True if the given `var` is persistable,
        False if not.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            param = fluid.default_main_program().global_block().var('fc.b')
            res = fluid.io.is_persistable(param)
    """
    if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
            var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
            var.desc.type() == core.VarDesc.VarType.READER:
        return False
    return var.persistable


def is_belong_to_optimizer(var):
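    """
    Check whether the given variable carries the `belong_to_optimizer` flag,
    which marks variables created by optimizer ops (e.g. accumulators).
    """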
    return var.belong_to_optimizer


def _clone_var_in_block_(block, var):
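    # Clone `var` into `block` as a persistable variable so that save/load ops
    # appended to `block` can refer to it by name; LoD information is only kept
    # for LOD_TENSOR variables.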
    assert isinstance(var, Variable)
    if var.desc.type() == core.VarDesc.VarType.LOD_TENSOR:
        return block.create_var(
            name=var.name,
            shape=var.shape,
            dtype=var.dtype,
            type=var.type,
            lod_level=var.lod_level,
            persistable=True)
    else:
        return block.create_var(
            name=var.name,
            shape=var.shape,
            dtype=var.dtype,
            type=var.type,
            persistable=True)


def _get_valid_program(main_program):
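    # Resolve `main_program` to a plain Program: fall back to the default main
    # program when None, and unwrap the underlying Program when a CompiledProgram
    # is passed in.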
    if main_program is None:
        main_program = default_main_program()
    elif isinstance(main_program, CompiledProgram):
        main_program = main_program._program
        if main_program is None:
            raise TypeError("program should be as Program type or None")
        warnings.warn(
            "The input is a CompiledProgram, this is not recommended.")
    if not isinstance(main_program, Program):
        raise TypeError("program should be as Program type or None")
    return main_program


def save_vars(executor,
              dirname,
              main_program=None,
              vars=None,
              predicate=None,
              filename=None):
    """
    Save variables to the given directory by executor.

    There are two ways to specify the variables to be saved: The first way is to
    list variables in a list and assign it to `vars`. The second way is to assign
    `main_program` with an existing program; then all variables in the program
    will be saved. The first way has a higher priority. In other words, if `vars`
    are assigned, the `main_program` and the `predicate` will be ignored.

    `dirname` is used to specify the folder where the variables are saved.
    If you prefer to save variables in separate files in the folder `dirname`,
    set `filename` to None; if you prefer to save all variables in a single file,
    use `filename` to specify it.

    Args:
        executor(Executor): The executor to run for saving variables.
        dirname(str): The directory path.
        main_program(Program|None): The program whose variables will be saved.
                                    If it is None, the default main program will
                                    be used automatically.
                                    Default: None
        vars(list[Variable]|None): The list that contains all variables to save.
                                   It has a higher priority than the `main_program`.
                                   Default: None
        predicate(function|None): If it is not None, only variables in the
                                  `main_program` for which predicate(variable) == True
                                  will be saved. It only works when we are using the
                                  `main_program` to specify variables (in other words,
                                  `vars` is None).
                                  Default: None
        filename(str|None): The file which to save all variables. If you prefer to save
                            variables separately, set it to None.
                            Default: None

    Returns:
        None

    Raises:
        TypeError: If `main_program` is not an instance of Program nor None.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            main_prog = fluid.Program()
            startup_prog = fluid.Program()
            with fluid.program_guard(main_prog, startup_prog):
                data = fluid.layers.data(name="img", shape=[64, 784], append_batch_size=False)
                w = fluid.layers.create_parameter(shape=[784, 200], dtype='float32', name='fc_w')
                b = fluid.layers.create_parameter(shape=[200], dtype='float32', name='fc_b')
                hidden_w = fluid.layers.matmul(x=data, y=w)
                hidden_b = fluid.layers.elementwise_add(hidden_w, b)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(startup_prog)

            param_path = "./my_paddle_model"
            # The first usage: using `main_program` to specify variables
            def name_has_fc(var):
                res = "fc" in var.name
                return res
            fluid.io.save_vars(executor=exe, dirname=param_path, main_program=main_prog,
                               vars=None, predicate = name_has_fc)
            # All variables in `main_program` whose name includes "fc" will be saved.
            # And variables are going to be saved separately.


            # The second usage: using `vars` to specify variables
            var_list = [w, b]
            path = "./my_paddle_vars"
            fluid.io.save_vars(executor=exe, dirname=path, vars=var_list,
                               filename="vars_file")
            # w and b will be saved, and they are going to be
            # saved in the same file named 'vars_file' in the path "./my_paddle_vars".
    """
    save_dirname = os.path.normpath(dirname)
    main_program = _get_valid_program(main_program)

    if vars is None:
        save_vars(
            executor,
            main_program=main_program,
            dirname=save_dirname,
            vars=list(filter(predicate, main_program.list_vars())),
            filename=filename)
    else:
        # give warning when there is no var in model
        if len(list(vars)) == 0:
            warnings.warn(
                "no variable in your model, please ensure there are any variables in your model to save"
            )
            return None

        save_program = Program()
        save_block = save_program.global_block()

        save_var_map = {}
        for each_var in vars:
            # NOTE: don't save the variable which type is RAW
            if each_var.type == core.VarDesc.VarType.RAW:
                continue
            new_var = _clone_var_in_block_(save_block, each_var)
            if filename is None:
                save_file_path = os.path.join(save_dirname, new_var.name)
                save_file_path = os.path.normpath(save_file_path)
                save_block.append_op(
                    type='save',
                    inputs={'X': [new_var]},
                    outputs={},
                    attrs={'file_path': save_file_path})
            else:
                save_var_map[new_var.name] = new_var

        if filename is not None:
            save_var_list = []
            for name in sorted(save_var_map.keys()):
                save_var_list.append(save_var_map[name])

            save_block.append_op(
                type='save_combine',
                inputs={'X': save_var_list},
                outputs={},
                attrs={'file_path': os.path.join(save_dirname, filename)})

        executor.run(save_program)


def save_params(executor, dirname, main_program=None, filename=None):
    """
    This function filters out all parameters from the given `main_program`
    and then saves them to the folder `dirname` or the file `filename`.

    Use the `dirname` to specify the saving folder. If you would like to
    save parameters in separate files, set `filename` to None; if you would
    like to save all parameters in a single file, use `filename` to specify
    the file name.

    NOTICE: Some variables are not Parameter while they are necessary for
    training. So you can NOT save and continue your training just by
    `save_params()` and `load_params()`. Please use `save_persistables()`
    and `load_persistables()` instead. If you want to save your model for
    the inference, please use the `save_inference_model` API. You can refer
    to :ref:`api_guide_model_save_reader_en` for more details.

    Args:
        executor(Executor): The executor to run for saving parameters.
        dirname(str): The saving directory path.
        main_program(Program|None): The program whose parameters will be
                                    saved. If it is None, the default
                                    main program will be used automatically.
                                    Default: None
        filename(str|None): The file to save all parameters. If you prefer
                            to save parameters in different files, set it
                            to None.
                            Default: None

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.save_params(executor=exe, dirname=param_path,
                                 main_program=None)
    """
    save_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        vars=None,
        predicate=is_parameter,
        filename=filename)


def _save_distributed_persistables(executor, dirname, main_program):
    """
    save_persistables for distributed training.
    the method will do things listed below:
    1.save part of persistable variables on trainer.
    2.receive "remote prefetch variables" from parameter servers and merge them.
    3.save "distributed lookup table" on parameter servers.
    4.receive "optimizer variables" from parameter servers and merge them.

    Args:
        executor(Executor): The executor to run for saving parameters.
        dirname(str): The saving directory path.
        main_program(Program): The program whose parameters will be
                            saved. The main_program must be the trainer_program
                            obtained after the transpiler.

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            t = distribute_transpiler.DistributeTranspiler()
            t.transpile(...)
            train_program = t.get_trainer_program()
            _save_distributed_persistables(executor=exe, dirname=param_path, main_program=train_program)
    """

    def __save_remote_params(executor, dirname, remote_params_map):
        """
        receive params on pserver through rpc.
        if the params are sliced, concat them into one and then save it.
        """
        if not remote_params_map:
            return

        prog = Program()
        block = prog.global_block()

        # recv optimize vars from pserver
        for name, remote_params in remote_params_map.items():
            origin_var = None
            is_slice = False
            slice_vars = [0] * len(remote_params)
            slice_var_names = [""] * len(remote_params)
            endpoints = [""] * len(remote_params)

            for idx, optimizer in enumerate(remote_params):
                origin = optimizer.origin
                slice = optimizer.slice
                is_slice = optimizer.is_slice
                block_id = optimizer.block_id
                endpoint = optimizer.endpoint

                if idx == 0:
                    origin_var = block.create_var(
                        name=origin.name,
                        type=origin.type,
                        shape=origin.shape,
                        dtype=origin.dtype,
                        persistable=True)

                slice_var = block.create_var(
                    name="{}.slice.{}".format(slice.name, idx),
                    type=slice.type,
                    shape=slice.shape,
                    dtype=slice.dtype,
                    persistable=True)

                index = block_id if is_slice else idx
                slice_vars[index] = slice_var
                slice_var_names[index] = slice.name
                endpoints[index] = endpoint

            if is_slice:
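                # the parameter was sliced across pservers: receive every slice
                # from its endpoint, then concat them back into the origin var.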
                block.append_op(
                    type='recv',
                    inputs={"X": []},
                    outputs={"Out": slice_vars},
                    attrs={
                        "epmap": endpoints,
                        "with_barrier": False,
                        "varnames": slice_var_names,
                        "sync_mode": True
                    })
                block.append_op(
                    type='concat',
                    inputs={'X': slice_vars},
                    outputs={'Out': origin_var},
                    attrs={})
            else:
                block.append_op(
                    type='recv',
                    inputs={"X": []},
                    outputs={"Out": [origin_var]},
                    attrs={
                        "epmap": endpoints[:1],
                        "with_barrier": False,
                        "varnames": slice_var_names,
                        "sync_mode": True
                    })
            block.append_op(
                type='save',
                inputs={'X': [origin_var]},
                outputs={},
                attrs={'file_path': os.path.join(dirname, origin_var.name)})
            block.append_op(type='delete_var', inputs={'X': slice_vars})
        executor.run(prog)

    def __save_distributed_lookup_tables(executor, dirname,
                                         distributed_lookup_table, endpoints):
        """
        because the distributed lookup table may be too huge to merge and save in one place,
        it is saved on each parameter server independently.

        the save directory is dirname/"__lookup_table__".

        """
        prog = Program()
        block = prog.global_block()

        # if there is lookup table, the trainer 0 will notify all pserver to save.
        lookup_table_filename = os.path.join(dirname, "__lookup_table__")
        attrs = {}
        attrs['epmap'] = endpoints
        attrs['dir'] = lookup_table_filename
        attrs['lookup_table'] = distributed_lookup_table
        block.append_op(
            type='checkpoint_notify', inputs={}, outputs={}, attrs=attrs)
        executor.run(prog)

    def __exclude_vars(exclude_var_names=[]):
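        # returns a predicate that keeps persistable variables which are not in
        # `exclude_var_names` and are not of feed/fetch/reader type.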
        def is_valid(var):
            if var.name in exclude_var_names:
                return False
            if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
                        var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
                        var.desc.type() == core.VarDesc.VarType.READER:
                return False
            return var.persistable

        return is_valid

    if not isinstance(main_program, Program):
        raise TypeError("'main_program' should be an instance of Program.")

    if not main_program._is_distributed:
        raise ValueError(
            "'_save_distributed_persistables' just be designed for distributed training."
        )

    remote_params_map = main_program._parameters_on_pservers.get_distributed_vars_by_vtypes(
        ["Optimizer", "RemotePrefetch"], groupby=True)

    exclude_var_names = []
    if remote_params_map:
        exclude_var_names.extend(remote_params_map.keys())

    if main_program._distributed_lookup_table:
        if isinstance(main_program._distributed_lookup_table, list):
            exclude_var_names.extend(main_program._distributed_lookup_table)
        else:
            exclude_var_names.append(main_program._distributed_lookup_table)

    local_vars = list(
        filter(__exclude_vars(exclude_var_names), main_program.list_vars()))
    save_vars(
        executor, main_program=main_program, dirname=dirname, vars=local_vars)

    if main_program._is_chief:
        if remote_params_map:
            __save_remote_params(executor, dirname, remote_params_map)
        if main_program._distributed_lookup_table:
            __save_distributed_lookup_tables(
                executor, dirname, main_program._distributed_lookup_table,
                main_program._endpoints)


def save_persistables(executor, dirname, main_program=None, filename=None):
    """
    This function filters out all variables with `persistable==True` from the
    given `main_program` and then saves these variables to the folder `dirname`
    or file `filename`.

    The `dirname` is used to specify the folder where persistable variables
    are going to be saved. If you would like to save variables in separate
    files, set `filename` None; if you would like to save all variables in a
    single file, use `filename` to specify the file name.

    Args:
        executor(Executor): The executor to run for saving persistable variables.
        dirname(str): The directory path.
        main_program(Program|None): The program whose persistable variables will
                                    be saved. If it is None, the default main
                                    program will be used automatically.
                                    Default: None
        filename(str|None): The file to save all variables. If you prefer to
                            save variables in different files, set it to None.
                            Default: None

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            # `prog` can be a program defined by the user
            prog = fluid.default_main_program()
            fluid.io.save_persistables(executor=exe, dirname=param_path,
                                       main_program=prog)
    """
    if main_program and main_program._is_distributed:
        _save_distributed_persistables(
            executor, dirname=dirname, main_program=main_program)
    else:
        save_vars(
            executor,
            dirname=dirname,
            main_program=main_program,
            vars=None,
            predicate=is_persistable,
            filename=filename)


def load_vars(executor,
              dirname,
              main_program=None,
              vars=None,
              predicate=None,
              filename=None):
    """
    Load variables from the given directory by executor.

    There are two ways to specify the variables to be loaded: The first way is to
    list variables in a list and assign it to `vars`. The second way is to assign
    `main_program` with an existing program; then all variables in the program
    will be loaded. The first way has a higher priority. In other words, if `vars`
    are assigned, the `main_program` and the `predicate` will be ignored.

    `dirname` is used to specify the folder from which to load variables.
    If variables were saved in separate files in the folder `dirname`,
    set `filename` to None; if all variables were saved in a single file,
    use `filename` to specify it.

    Args:
        executor(Executor): The executor to run for loading variables.
        dirname(str): The directory path.
        main_program(Program|None): The program whose variables will be loaded.
                                    If it is None, the default main program will
                                    be used automatically.
                                    Default: None
        vars(list[Variable]|None): The list that contains all variables to load.
                                   It has a higher priority than the `main_program`.
                                   Default: None
        predicate(function|None): If it is not None, only variables in the
                                  `main_program` for which predicate(variable) == True
                                  will be loaded. It only works when we are using the
                                  `main_program` to specify variables (in other words,
                                  `vars` is None).
                                  Default: None
        filename(str|None): The file which saved all required variables. If variables
                            were saved in different files, set it to None.
                            Default: None

    Returns:
        None

    Raises:
        TypeError: If `main_program` is not an instance of Program nor None.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            main_prog = fluid.Program()
            startup_prog = fluid.Program()
            with fluid.program_guard(main_prog, startup_prog):
                data = fluid.layers.data(name="img", shape=[64, 784], append_batch_size=False)
                w = fluid.layers.create_parameter(shape=[784, 200], dtype='float32', name='fc_w')
                b = fluid.layers.create_parameter(shape=[200], dtype='float32', name='fc_b')
                hidden_w = fluid.layers.matmul(x=data, y=w)
                hidden_b = fluid.layers.elementwise_add(hidden_w, b)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(startup_prog)

            param_path = "./my_paddle_model"
            # The first usage: using `main_program` to specify variables
            def name_has_fc(var):
                res = "fc" in var.name
                return res
            fluid.io.save_vars(executor=exe, dirname=param_path, main_program=main_prog,
                              vars=None, predicate=name_has_fc)
            fluid.io.load_vars(executor=exe, dirname=param_path, main_program=main_prog,
                               vars=None, predicate=name_has_fc)
            # All variables in `main_program` whose name includes "fc" will be loaded.
            # And all the variables are supposed to have been saved in different files.

            # The second usage: using `vars` to specify variables
            path = "./my_paddle_vars"
            var_list = [w, b]
            fluid.io.save_vars(executor=exe, dirname=path, vars=var_list,
                               filename="vars_file")
            fluid.io.load_vars(executor=exe, dirname=path, vars=var_list,
                               filename="vars_file")
            # w and b will be loaded, and they are supposed to have
            # been saved in the same file named 'vars_file' in the path "./my_paddle_vars".
    """
    load_dirname = os.path.normpath(dirname)

    if vars is None:
        if main_program is None:
            main_program = default_main_program()
        if not isinstance(main_program, Program):
            raise TypeError("program's type should be Program")

        load_vars(
            executor,
            dirname=load_dirname,
            main_program=main_program,
            vars=list(filter(predicate, main_program.list_vars())),
            filename=filename)
    else:
        load_prog = Program()
        load_block = load_prog.global_block()

        if main_program is None:
            main_program = default_main_program()

        if not isinstance(main_program, Program):
            raise TypeError("program should be as Program type or None")

        #save origin param shape
        orig_para_shape = {}
        load_var_map = {}
        for each_var in vars:
            assert isinstance(each_var, Variable)
            if each_var.type == core.VarDesc.VarType.RAW:
                continue

            if isinstance(each_var, Parameter):
                var_temp = paddle.fluid.global_scope().find_var(each_var.name)
                assert var_temp is not None, "can't find var: " + each_var.name
                orig_para_shape[each_var.name] = (
                    np.array(var_temp.get_tensor())).shape
            new_var = _clone_var_in_block_(load_block, each_var)
            if filename is None:
                load_block.append_op(
                    type='load',
                    inputs={},
                    outputs={'Out': [new_var]},
                    attrs={
                        'file_path': os.path.join(load_dirname, new_var.name)
                    })
            else:
                load_var_map[new_var.name] = new_var

        if filename is not None:
            load_var_list = []
            for name in sorted(load_var_map.keys()):
                load_var_list.append(load_var_map[name])

            load_block.append_op(
                type='load_combine',
                inputs={},
                outputs={"Out": load_var_list},
                attrs={'file_path': os.path.join(load_dirname, filename)})
        executor.run(load_prog)

        #check var shape
        for each_var in vars:
            if not isinstance(each_var, Parameter):
                continue
            var_temp = paddle.fluid.global_scope().find_var(each_var.name)
            assert var_temp is not None, "can't find var: " + each_var.name
            new_shape = (np.array(var_temp.get_tensor())).shape
            assert each_var.name in orig_para_shape, each_var.name + " MUST be in var list"
            orig_shape = orig_para_shape.get(each_var.name)
            if new_shape != orig_shape:
                raise RuntimeError(
                    "Shape not matching: the Program requires a parameter with a shape of ({}), "
                    "while the loaded parameter (namely [ {} ]) has a shape of  ({}).".
                    format(orig_shape, each_var.name, new_shape))


def load_params(executor, dirname, main_program=None, filename=None):
    """
    This API filters out all parameters from the given ``main_program``
    and then tries to load these parameters from the directory ``dirname`` or
    the file ``filename``.

    Use the ``dirname`` to specify the directory where parameters were saved. If
    parameters were saved in separate files under the directory `dirname`, set
    ``filename`` as None; if all parameters were saved in a single file, use
    ``filename`` to specify the file name.

    **Note**:
        Some variables are not Parameter while they are necessary for
        training, such as learning rate, global step, etc. So you cannot save and
        continue your training just by using :ref:`api_fluid_io_save_params` and
        :ref:`api_fluid_io_load_params`. Please use :ref:`api_fluid_io_save_persistables`
        and :ref:`api_fluid_io_load_persistables` instead.

        If you want to load the pre-trained model structure and parameters
        for the inference, please use the :ref:`api_fluid_io_load_inference_model` API. You can
        refer to :ref:`api_guide_model_save_reader_en` for more details.

    Args:
        executor(Executor): The executor used for loading parameters.
                            See :ref:`api_guide_executor_en` for more details about it.
        dirname(str): The directory path.
        main_program(Program, optional): The program whose parameters will be
                                    loaded. If it is None, the ``default_main_program``
                                    will be used automatically. See :ref:`api_guide_Program_en`
                                    for more about ``Program``.
                                    Default: None.
        filename(str, optional): The file which saved all parameters. If parameters
                            were saved in separate files, set it to None.
                            Default: None.

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.load_params(executor=exe, dirname=param_path,
                                main_program=None)
    """
    load_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        predicate=is_parameter,
        filename=filename)


def load_persistables(executor, dirname, main_program=None, filename=None):
    """
    This API filters out all variables with ``persistable==True`` from the
    given ``main_program`` and then tries to load these variables from the
    directory ``dirname`` or the file ``filename``.

    Use the ``dirname`` to specify the directory where persistable variables
    (refer to :ref:`api_guide_model_save_reader_en`) were saved. If variables
    were saved in separate files, set ``filename`` as None; if all variables
    were saved in a single file, use ``filename`` to specify the file name.

    Args:
        executor(Executor): The executor used for loading persistable variables.
                            See :ref:`api_guide_executor_en` for more details about it.
        dirname(str): The directory path.
        main_program(Program, optional): The program whose persistable variables will
                                    be loaded. If it is None, the ``default_main_program``
                                    will be used automatically. See :ref:`api_guide_Program_en`
                                    for more about ``Program``.
                                    Default: None.
        filename(str, optional): The file which saved all persistable variables. If variables
                                 were saved in separate files, set it to None.
                                 Default: None.

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.load_persistables(executor=exe, dirname=param_path,
                                       main_program=None)
    """

    if main_program and main_program._is_distributed:
        _load_distributed_persistables(
            executor, dirname=dirname, main_program=main_program)
    else:
        load_vars(
            executor,
            dirname=dirname,
            main_program=main_program,
            predicate=is_persistable,
            filename=filename)


def _load_distributed_persistables(executor, dirname, main_program=None):
    """
    Customized load_persistables for distributed training.
    It should be used on the parameter server.

    Args:
        executor(Executor): The executor to run for saving parameters.
        dirname(str): The load directory path.
        main_program(Program): The program whose parameters will be
                            loaded. The main_program must be the pserver_program
                            obtained after the transpiler.

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            t = distribute_transpiler.DistributeTranspiler()
            t.transpile(...)
            pserver_prog = t.get_pserver_program(...)
            _load_distributed_persistables(executor=exe, dirname=param_path, main_program=pserver_prog)
    """

    def __is_distributed_part_var(varname):
        trainer_idx = varname.find(".trainer_")
        block_idx = varname.find(".block")
        return trainer_idx or block_idx

    def __load_persistable_vars(executor, dirname, need_load_vars):
        load_prog = Program()
        load_block = load_prog.global_block()
        need_delete_vars = []

        for param in need_load_vars:
            origin_var = param.origin
            slice_var = param.slice
            is_slice = param.is_slice
            offset = param.offset

            if is_slice:
                origin = load_block.create_var(
                    name="{}.load".format(origin_var.name),
                    type=origin_var.type,
                    shape=origin_var.shape,
                    dtype=origin_var.dtype,
                    persistable=True)

                load_block.append_op(
                    type='load',
                    inputs={},
                    outputs={'Out': [origin]},
                    attrs={
                        'file_path': os.path.join(dirname, origin_var.name)
                    })

                slice = load_block.create_var(
                    name=slice_var.name,
                    type=slice_var.type,
                    shape=slice_var.shape,
                    dtype=slice_var.dtype,
                    persistable=True)

                dim1_flatten = 1
                if len(slice.shape) >= 2:
                    dim1_flatten = reduce(lambda x, y: x * y, slice.shape[1:])
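                # dim1_flatten is the number of elements per row of the origin
                # tensor; it is used below to turn the flat element offset of
                # this slice into a row range along axis 0.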

                start = int(offset / dim1_flatten)
                end = int(offset / dim1_flatten + slice.shape[0])

                load_block.append_op(
                    type="slice",
                    inputs={'Input': origin},
                    outputs={'Out': slice},
                    attrs={'axes': [0],
                           'starts': [start],
                           'ends': [end]})

                need_delete_vars.append(origin)
            else:
                origin = load_block.create_var(
                    name="{}".format(origin_var.name),
                    type=origin_var.type,
                    shape=origin_var.shape,
                    dtype=origin_var.dtype,
                    persistable=True)
                load_block.append_op(
                    type='load',
                    inputs={},
                    outputs={'Out': [origin]},
                    attrs={
                        'file_path': os.path.join(dirname, origin_var.name)
                    })

        load_block.append_op(
            type='delete_var',
            inputs={'X': need_delete_vars}, )

        executor.run(load_prog)

    if not isinstance(main_program, Program):
        raise TypeError("'main_program' should be an instance of Program.")

    if not main_program._is_distributed:
        raise ValueError(
            "'_load_distributed_persistables' just be designed for distributed training."
        )

    if not main_program._ps_endpoint:
        raise ValueError(
            "'_load_distributed_persistables' need current_endpoint set in DistributeTranspiler.transpile"
        )

    need_load_vars = main_program._parameters_on_pservers.get_distributed_vars_by_ep(
        main_program._ps_endpoint)
    __load_persistable_vars(executor, dirname, need_load_vars)


def prepend_feed_ops(inference_program,
                     feed_target_names,
                     feed_holder_name='feed'):
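    # Prepend one `feed` op per feed target: each op reads from a single
    # FEED_MINIBATCH variable (`feed_holder_name`) and writes column `i` into
    # the corresponding target variable.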
    if len(feed_target_names) == 0:
        return

    global_block = inference_program.global_block()
    feed_var = global_block.create_var(
        name=feed_holder_name,
        type=core.VarDesc.VarType.FEED_MINIBATCH,
        persistable=True)

    for i, name in enumerate(feed_target_names):
        out = global_block.var(name)
        global_block._prepend_op(
            type='feed',
            inputs={'X': [feed_var]},
            outputs={'Out': [out]},
            attrs={'col': i})


def append_fetch_ops(inference_program,
                     fetch_target_names,
                     fetch_holder_name='fetch'):
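    # Append one `fetch` op per fetch target, copying each target variable into
    # column `i` of a single FETCH_LIST variable (`fetch_holder_name`).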
    global_block = inference_program.global_block()
    fetch_var = global_block.create_var(
        name=fetch_holder_name,
        type=core.VarDesc.VarType.FETCH_LIST,
        persistable=True)

    for i, name in enumerate(fetch_target_names):
        global_block.append_op(
            type='fetch',
            inputs={'X': [name]},
            outputs={'Out': [fetch_var]},
            attrs={'col': i})


def save_inference_model(dirname,
                         feeded_var_names,
                         target_vars,
                         executor,
                         main_program=None,
                         model_filename=None,
                         params_filename=None,
                         export_for_deployment=True,
                         program_only=False):
    """
    Prune the given `main_program` to build a new program especially for inference,
    and then save it and all related parameters to given `dirname` by the `executor`.
    If you just want to save parameters of your trained model, please use the
    `save_params` API. You can refer to :ref:`api_guide_model_save_reader_en` for
    more details.


    Args:
        dirname(str): The directory path to save the inference model.
        feeded_var_names(list[str]): Names of variables that need to be fed data
                                     during inference.
        target_vars(list[Variable]): Variables from which we can get inference
                                     results.
        executor(Executor): The executor that saves the inference model.
        main_program(Program|None): The original program, which will be pruned to
                                    build the inference model. If it is set to None,
                                    the default main program will be used.
                                    Default: None.
        model_filename(str|None): The name of file to save the inference program
                                  itself. If it is set to None, a default filename
                                  `__model__` will be used.
        params_filename(str|None): The name of file to save all related parameters.
                                   If it is set to None, parameters will be saved
                                   in separate files.
        export_for_deployment(bool): If True, programs are modified to only support
                                     direct inference deployment. Otherwise,
                                     more information will be stored for flexible
                                     optimization and re-training. Currently, only
                                     True is supported.
        program_only(bool): If True, it will save the inference program only, and will not save the parameters of the Program.

    Returns:
        target_var_name_list(list): The fetch variables' name list

    Raises:
        ValueError: If `feed_var_names` is not a list of basestring.
        ValueError: If `target_vars` is not a list of Variable.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            path = "./infer_model"

            # User defined network, here a softmax regression example
            image = fluid.layers.data(name='img', shape=[1, 28, 28], dtype='float32')
            label = fluid.layers.data(name='label', shape=[1], dtype='int64')
            feeder = fluid.DataFeeder(feed_list=[image, label], place=fluid.CPUPlace())
            predict = fluid.layers.fc(input=image, size=10, act='softmax')

            loss = fluid.layers.cross_entropy(input=predict, label=label)
            avg_loss = fluid.layers.mean(loss)

            exe = fluid.Executor(fluid.CPUPlace())
            exe.run(fluid.default_startup_program())

            # Feed data and train process

            # Save inference model. Note we don't save label and loss in this example
            fluid.io.save_inference_model(dirname=path,
                                          feeded_var_names=['img'],
                                          target_vars=[predict],
                                          executor=exe)

            # In this example, the function will prune the default main program
            # to make it suitable for inferring the `predict` var. The pruned
            # inference program is going to be saved in the "./infer_model/__model__"
            # and parameters are going to be saved in separate files under folder
            # "./infer_model".

    """
    if isinstance(feeded_var_names, six.string_types):
        feeded_var_names = [feeded_var_names]
    elif export_for_deployment:
        if len(feeded_var_names) > 0:
            # TODO(paddle-dev): polish these code blocks
            if not (bool(feeded_var_names) and all(
                    isinstance(name, six.string_types)
                    for name in feeded_var_names)):
                raise ValueError("'feed_var_names' should be a list of str.")

    if isinstance(target_vars, Variable):
        target_vars = [target_vars]
    elif export_for_deployment:
        if not (bool(target_vars) and
                all(isinstance(var, Variable) for var in target_vars)):
            raise ValueError("'target_vars' should be a list of Variable.")

    main_program = _get_valid_program(main_program)

    # remind user to set auc_states to zeros if the program contains auc op 
    all_ops = main_program.global_block().ops
    for op in all_ops:
        if op.type == 'auc':
            warnings.warn(
                "please ensure that you have set the auc states to zeros before saving inference model"
            )
            break

    # fix the bug that the activation op's output as target will be pruned,
    # which will affect the inference performance.
    # TODO(Superjomn) add an IR pass to remove 1-scale op.
    with program_guard(main_program):
        uniq_target_vars = []
        for i, var in enumerate(target_vars):
            if isinstance(var, Variable):
                var = layers.scale(
                    var, 1., name="save_infer_model/scale_{}".format(i))
            uniq_target_vars.append(var)
        target_vars = uniq_target_vars
    target_var_name_list = [var.name for var in target_vars]

    # when a pserver and a trainer are running on the same machine, mkdir may conflict
    save_dirname = dirname
    try:
        save_dirname = os.path.normpath(dirname)
        os.makedirs(save_dirname)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    if model_filename is not None:
        model_basename = os.path.basename(model_filename)
    else:
        model_basename = "__model__"
    model_basename = os.path.join(save_dirname, model_basename)

    # When export_for_deployment is true, we modify the program online so that
    # it can only be loaded for inference directly. If it's false, the whole
    # original program and related meta are saved so that future usage can be
    # more flexible.

    origin_program = main_program.clone()
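    # origin_program keeps an unpruned copy: distributed parameter info is
    # copied back from it onto the pruned program before persistables are saved.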

    if export_for_deployment:
        main_program = main_program.clone()
        global_block = main_program.global_block()
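        # drop any existing feed/fetch ops first; fresh ones are prepended and
        # appended below after pruning, so stale ones are not duplicated.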
        need_to_remove_op_index = []
        for i, op in enumerate(global_block.ops):
            op.desc.set_is_target(False)
            if op.type == "feed" or op.type == "fetch":
                need_to_remove_op_index.append(i)

        for index in need_to_remove_op_index[::-1]:
            global_block._remove_op(index)

        main_program.desc.flush()

        main_program = main_program._prune_with_input(
            feeded_var_names=feeded_var_names, targets=target_vars)
        main_program = main_program._inference_optimize(prune_read_op=True)
        fetch_var_names = [v.name for v in target_vars]

        prepend_feed_ops(main_program, feeded_var_names)
        append_fetch_ops(main_program, fetch_var_names)

        main_program.desc._set_version()
        paddle.fluid.core.save_op_compatible_info(main_program.desc)
        with open(model_basename, "wb") as f:
            f.write(main_program.desc.serialize_to_string())
    else:
        # TODO(panyx0718): Save more information so that it can also be used
        # for training and more flexible post-processing.
        with open(model_basename + ".main_program", "wb") as f:
            f.write(main_program.desc.serialize_to_string())

    if program_only:
        warnings.warn(
            "save_inference_model specified the param `program_only` to True, It will not save params of Program."
        )
        return target_var_name_list

    main_program._copy_dist_param_info_from(origin_program)

    if params_filename is not None:
        params_filename = os.path.basename(params_filename)

    save_persistables(executor, save_dirname, main_program, params_filename)
    return target_var_name_list


def load_inference_model(dirname,
                         executor,
                         model_filename=None,
                         params_filename=None,
                         pserver_endpoints=None):
    """
    Load the inference model from a given directory. By this API, you can get the model
    structure(Inference Program) and model parameters. If you just want to load
    parameters of the pre-trained model, please use the :ref:`api_fluid_io_load_params` API.
    You can refer to :ref:`api_guide_model_save_reader_en` for more details.

    Args:
        dirname(str): The given directory path.
        executor(Executor): The executor to run for loading inference model.
                            See :ref:`api_guide_executor_en` for more details about it.
        model_filename(str, optional): The name of file to load the inference program.
                                  If it is None, the default filename
                                  ``__model__`` will be used.
                                  Default: ``None``.
        params_filename(str, optional): The name of file to load all parameters.
                                   It is only used for the case that all
                                   parameters were saved in a single binary
                                   file. If parameters were saved in separate
                                   files, set it as ``None``.
                                   Default: ``None``.

        pserver_endpoints(list, optional): It is only needed by the distributed inference.
                                    If using a distributed look up table during the training,
                                    this table is also needed by the inference process. Its value is
                                    a list of pserver endpoints.

    Returns:
        list: The return of this API is a list with three elements:
        (program, feed_target_names, fetch_targets). The `program` is a
        ``Program`` (refer to :ref:`api_guide_Program_en`), which is used for inference.
        The `feed_target_names` is a list of ``str``, which contains names of variables
        that need to feed data in the inference program. The `fetch_targets` is a list of
        ``Variable`` (refer to :ref:`api_guide_Program_en`). It contains variables from which
        we can get inference results.

    Raises:
        ValueError: If `dirname` is not an existing directory.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            import numpy as np
1219 1220

            # Build the model
1221 1222 1223 1224 1225 1226 1227 1228 1229 1230 1231
            main_prog = fluid.Program()
            startup_prog = fluid.Program()
            with fluid.program_guard(main_prog, startup_prog):
                data = fluid.layers.data(name="img", shape=[64, 784], append_batch_size=False)
                w = fluid.layers.create_parameter(shape=[784, 200], dtype='float32')
                b = fluid.layers.create_parameter(shape=[200], dtype='float32')
                hidden_w = fluid.layers.matmul(x=data, y=w)
                hidden_b = fluid.layers.elementwise_add(hidden_w, b)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(startup_prog)
1232 1233

            # Save the inference model
F
fengjiayi 已提交
1234
            path = "./infer_model"
1235 1236
            fluid.io.save_inference_model(dirname=path, feeded_var_names=['img'],
                         target_vars=[hidden_b], executor=exe, main_program=main_prog)
1237 1238 1239

            # Demo one. Not need to set the distributed look up table, because the
            # training doesn't use a distributed look up table.
1240 1241
            [inference_program, feed_target_names, fetch_targets] = (
                fluid.io.load_inference_model(dirname=path, executor=exe))
1242
            tensor_img = np.array(np.random.random((1, 64, 784)), dtype=np.float32)
F
fengjiayi 已提交
1243 1244 1245 1246
            results = exe.run(inference_program,
                          feed={feed_target_names[0]: tensor_img},
                          fetch_list=fetch_targets)

1247 1248 1249
            # Demo two. If the training uses a distributed look up table, the pserver
            # endpoints list should be supported when loading the inference model.
            # The below is just an example.
1250
            endpoints = ["127.0.0.1:2023","127.0.0.1:2024"]
1251
            [dist_inference_program, dist_feed_target_names, dist_fetch_targets] = (
1252 1253
                fluid.io.load_inference_model(dirname=path,
                                              executor=exe,
1254
                                              pserver_endpoints=endpoints))
1255

1256
            # In this example, the inference program was saved in the file
1257
            # "./infer_model/__model__" and parameters were saved in
1258 1259 1260 1261
            # separate files under the directory "./infer_model".
            # By the inference program, feed_target_names and
            # fetch_targets, we can use an executor to run the inference
            # program for getting the inference result.
1262
    """
    load_dirname = os.path.normpath(dirname)
    if not os.path.isdir(load_dirname):
        raise ValueError("There is no directory named '%s'" % dirname)

    if model_filename is not None:
        model_filename = os.path.basename(model_filename)
    else:
        model_filename = "__model__"
    model_filename = os.path.join(load_dirname, model_filename)

    if params_filename is not None:
        params_filename = os.path.basename(params_filename)

    with open(model_filename, "rb") as f:
        program_desc_str = f.read()

    program = Program.parse_from_string(program_desc_str)
    if not core._is_program_version_supported(program._version()):
        raise ValueError("Unsupported program version: %d\n" %
                         program._version())
    # Binary data also needs versioning.
    load_persistables(executor, load_dirname, program, params_filename)

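    # For distributed inference, rewrite the pserver endpoints recorded in the
    # program (the "epmap" op attributes) with the endpoints supplied by the caller.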
    if pserver_endpoints:
        program = _endpoints_replacement(program, pserver_endpoints)

    feed_target_names = program.desc.get_feed_target_names()
    fetch_target_names = program.desc.get_fetch_target_names()
    fetch_targets = [
        program.global_block().var(name) for name in fetch_target_names
    ]

    return [program, feed_target_names, fetch_targets]


def _endpoints_replacement(program, endpoints):
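    """
    Overwrite the pserver endpoints recorded in the program.

    Every op carrying an ``epmap`` attribute (used by distributed ops such as
    the distributed lookup table) gets its endpoint list replaced with the
    endpoints supplied at inference time.
    """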
    ENDPOINT_MAP = "epmap"
    for op in program.global_block().ops:
        if op.has_attr(ENDPOINT_MAP):
            op.set_attr(ENDPOINT_MAP, endpoints)
    program._sync_with_cpp()
    return program


def get_parameter_value(para, executor):
    """
    Get the LoDTensor value of the given parameter.

    Args:
        para(Parameter): The parameter to get value from.
        executor(Executor): The executor to run for retrieving the value.

    Returns:
        numpy.array: The given parameter's values.

    Raises:
        AssertionError: If the `para` is not an instance of Parameter.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            exe = fluid.Executor(fluid.CPUPlace())
            param = fluid.default_main_program().global_block().var('fc.w')
            p = fluid.io.get_parameter_value(param, exe)

    """
    assert is_parameter(para)

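    # Fetch the parameter through a small throw-away program: cloning the
    # variable into an empty Program and fetching it returns the parameter's
    # current value as a numpy array.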
    get_program = Program()
    block = get_program.global_block()
    new_var = _clone_var_in_block_(block, para)
    return executor.run(get_program, feed={}, fetch_list=[new_var])[0]


def get_parameter_value_by_name(name, executor, program=None):
    """
    Get the LoDTensor value of a certain parameter by its name.

    Args:
        name(str): The parameter's name.
        executor(Executor): The executor to run for retrieving the value.
        program(Program | None): The program in which to find the parameter.
                               If it's set to be None, the function will
                               try to find the parameter in the default
                               main program.

    Returns:
        numpy.array: The parameter's values.

    Raises:
        TypeError: If the given `name` is not an instance of basestring.
        TypeError: If the parameter with the given name doesn't exist.
        AssertionError: If there is a variable named `name` in the
                        given program but it is not a Parameter.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            exe = fluid.Executor(fluid.CPUPlace())
            p = fluid.io.get_parameter_value_by_name('fc.w', exe)
    """
    if program is None:
        program = default_main_program()
    var = program.global_block().var(name)
    return get_parameter_value(var, executor)


def _save_persistable_nodes(executor, dirname, graph):
    """
    Save persistable nodes to the given directory by the executor.

    Args:
        executor(Executor): The executor to run for saving node values.
        dirname(str): The directory path.
        graph(IrGraph): All the required persistable nodes in the graph will be saved.
    """
    persistable_node_names = set()
    persistable_nodes = []
    all_persistable_nodes = graph.all_persistable_nodes()
    for node in all_persistable_nodes:
        name = cpt.to_text(node.name())
        if name not in persistable_node_names:
            persistable_node_names.add(name)
            persistable_nodes.append(node)
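    # Re-create each persistable node as a variable in a temporary Program so
    # that the regular save_vars path can be reused; RAW and READER variables
    # hold no data to persist and are skipped below.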
    program = Program()
    var_list = []
    for node in persistable_nodes:
        var_desc = node.var()
        if var_desc.type() == core.VarDesc.VarType.RAW or \
                var_desc.type() == core.VarDesc.VarType.READER:
            continue
        var = program.global_block().create_var(
            name=var_desc.name(),
            shape=var_desc.shape(),
            dtype=var_desc.dtype(),
            type=var_desc.type(),
            lod_level=var_desc.lod_level(),
            persistable=var_desc.persistable())
        var_list.append(var)
    save_vars(executor=executor, dirname=dirname, vars=var_list)


def _load_persistable_nodes(executor, dirname, graph):
    """
    Load persistable node values from the given directory by the executor.

    Args:
        executor(Executor): The executor to run for loading node values.
        dirname(str): The directory path.
        graph(IrGraph): All the required persistable nodes in the graph will be loaded.
    """
    persistable_node_names = set()
    persistable_nodes = []
    all_persistable_nodes = graph.all_persistable_nodes()
    for node in all_persistable_nodes:
        name = cpt.to_text(node.name())
        if name not in persistable_node_names:
            persistable_node_names.add(name)
            persistable_nodes.append(node)
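    # Rebuild the persistable variables in a temporary Program so that the
    # regular load_vars path can be reused; variables whose value files are
    # missing under dirname are skipped and reported below.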
    program = Program()
    var_list = []

    def _exist(var):
        return os.path.exists(os.path.join(dirname, var.name))

    for node in persistable_nodes:
        var_desc = node.var()
        if var_desc.type() == core.VarDesc.VarType.RAW or \
                var_desc.type() == core.VarDesc.VarType.READER:
            continue
        var = program.global_block().create_var(
            name=var_desc.name(),
            shape=var_desc.shape(),
            dtype=var_desc.dtype(),
            type=var_desc.type(),
            lod_level=var_desc.lod_level(),
            persistable=var_desc.persistable())
        if _exist(var):
            var_list.append(var)
        else:
            _logger.warning("Cannot find the variable %s!" % (node.name()))
    load_vars(executor=executor, dirname=dirname, vars=var_list)


def save(program, model_path):
    """
    This function saves the parameters, optimizer information and network description to model_path.

    The parameters contain all the trainable Variables and are saved to a file with the suffix ".pdparams".
    The optimizer information contains all the variables used by the optimizer. For the Adam optimizer, this includes beta1, beta2, momentum, etc. All the information is saved to a file with the suffix ".pdopt". (If the optimizer has no variables to save, e.g. SGD, the file will not be generated.)
    The network description is the description of the program. It is only used for deployment and is saved to a file with the suffix ".pdmodel".

    Args:
        program(Program): The program to be saved.
        model_path(str): The file prefix used to save the program. The format is "dirname/file_prefix". If file_prefix is an empty string, an exception will be raised.

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            prog = fluid.default_main_program()
            fluid.save( prog, "./temp")
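            # This writes "./temp.pdparams" and "./temp.pdmodel" (plus
            # "./temp.pdopt" when the program has optimizer variables).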

    """

    base_name = os.path.basename(model_path)
    assert base_name != "", \
            "model_path MUST be in the format of dirname/filename [dirname\\filename on Windows]; the filename part is an empty string now"

    parameter_list = list(filter(is_parameter, program.list_vars()))
    paddle.fluid.core._save_static_dict(model_path + ".pdparams",
                                        parameter_list, global_scope())

    optimizer_var_list = list(
        filter(is_belong_to_optimizer, program.list_vars()))

    paddle.fluid.core._save_static_dict(model_path + ".pdopt",
                                        optimizer_var_list, global_scope())

    main_program = program.clone()
    main_program.desc.flush()
    main_program.desc._set_version()
    paddle.fluid.core.save_op_compatible_info(main_program.desc)

    with open(model_path + ".pdmodel", "wb") as f:
        f.write(main_program.desc.serialize_to_string())


def load(program, model_path):
    """
    This function filters out the parameters and optimizer information of ``program``,
    and then loads the corresponding values from the saved files.
    An exception will be thrown if the shape or dtype of a parameter does not match
    between the program and the loaded files.

    NOTICE: This function MUST be called after running the startup program
    (e.g. ``exe.run(fluid.default_startup_program())``).

    Args:
        program(Program): The program to be loaded.
        model_path(str): The file prefix from which the program was saved.

    Returns:
        None

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid

            prog = fluid.default_main_program()
            exe = fluid.Executor(fluid.CPUPlace())
            exe.run(fluid.default_startup_program())

            fluid.save(prog, "./temp")
            fluid.load(prog, "./temp")

    """

    parameter_file_name = model_path + ".pdparams"
    assert os.path.exists(parameter_file_name), \
            "Parameter file [{}] does not exist".format(parameter_file_name)

    parameter_list = list(filter(is_parameter, program.list_vars()))
    paddle.fluid.core._load_static_dict(parameter_file_name, parameter_list,
                                        global_scope())

    optimizer_var_list = list(
        filter(is_belong_to_optimizer, program.list_vars()))

    if len(optimizer_var_list) > 0:
        opt_file_name = model_path + ".pdopt"
        assert os.path.exists(opt_file_name), \
                "Optimizer file [{}] does not exist".format(opt_file_name)
        paddle.fluid.core._load_static_dict(opt_file_name, optimizer_var_list,
                                            global_scope())