#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import errno
import time
import shutil

from paddle.fluid.evaluator import Evaluator
from paddle.fluid.framework import Program, Parameter, default_main_program, default_startup_program, Variable
from . import core

__all__ = [
    'save_vars', 'save_params', 'save_persistables', 'load_vars', 'load_params',
    'load_persistables', 'save_inference_model', 'load_inference_model',
    'get_inference_program', 'save_checkpoint', 'load_checkpoint',
    'clean_checkpoint', 'load_persist_vars_without_grad',
    'load_lookup_table_vars', 'save_persist_vars_without_grad',
    'get_latest_checkpoint_serial', 'foo'
]


def foo():
    pass


def is_parameter(var):
    """
    Check whether the given variable is an instance of Parameter.

    Args:
        var(Variable): The variable to be checked.

    Returns:
        bool: True if the given `var` is an instance of Parameter,
        False if not.

    Examples:
        .. code-block:: python

            param = fluid.default_main_program().global_block().var('fc.w')
            res = fluid.io.is_parameter(param)
    """
    return isinstance(var, Parameter)


def is_persistable(var):
    """
    Check whether the given variable is persistable.

    Args:
        var(Variable): The variable to be checked.

    Returns:
        bool: True if the given `var` is persistable
        False if not.

    Examples:
        .. code-block:: python

            param = fluid.default_main_program().global_block().var('fc.w')
            res = fluid.io.is_persistable(param)
    """
    if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
            var.desc.type() == core.VarDesc.VarType.FETCH_LIST:
        return False
    return var.persistable


def _clone_var_in_block_(block, var):
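    # Mirror `var`'s name/shape/dtype/type/lod_level into `block` as a
    # persistable variable so that save/load ops can reference it there.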
    assert isinstance(var, Variable)
    return block.create_var(
        name=var.name,
        shape=var.shape,
        dtype=var.dtype,
        type=var.type,
        lod_level=var.lod_level,
        persistable=True)


def save_vars(executor,
              dirname,
              main_program=None,
              vars=None,
              predicate=None,
              filename=None):
    """
    Save variables to the given directory with the executor.

    There are two ways to specify the variables to be saved: the first way is
    to list them and assign the list to `vars`; the second way is to assign an
    existing program to `main_program`, in which case all variables in the
    program will be saved. The first way takes priority: if `vars` is
    assigned, `main_program` and `predicate` are ignored.

    The `dirname` is used to specify the folder where the variables are saved.
    If you prefer to save variables in separate files in the folder `dirname`,
    set `filename` to None; if you prefer to save all variables in a single
    file, use `filename` to specify it.

    Args:
        executor(Executor): The executor to run for saving variables.
        dirname(str): The directory path.
        main_program(Program|None): The program whose variables will be saved.
                                    If it is None, the default main program will
                                    be used automatically.
                                    Default: None
        vars(list[Variable]|None): The list that contains all variables to save.
                                   It has a higher priority than the `main_program`.
                                   Default: None
        predicate(function|None): If it is not None, only variables in the
                                  `main_program` that make predicate(variable)==True
                                  will be saved. It only works when we are using
                                  the `main_program` to specify variables (in
                                  other words, `vars` is None).
                                  Default: None
        filename(str|None): The file to save all variables into. If you prefer
                            to save variables separately, set it to None.
                            Default: None

    Returns:
        None

    Raises:
        TypeError: If `main_program` is not an instance of Program nor None.

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"

            # The first usage: using `main_program` to specify variables
            def name_has_fc(var):
                res = "fc" in var.name
                return res

            prog = fluid.default_main_program()
            fluid.io.save_vars(executor=exe, dirname=param_path, main_program=prog,
                               vars=None, predicate=name_has_fc)
            # All variables in `main_program` whose name includes "fc" will be saved.
            # And variables are going to be saved separately.


            # The second usage: using `vars` to specify variables
            var_list = [var_a, var_b, var_c]
            fluid.io.save_vars(executor=exe, dirname=param_path, vars=var_list,
                               filename="vars_file")
            # var_a, var_b and var_c will be saved. And they are going to be
            # saved in the same file named 'vars_file' in the path "./my_paddle_model".
    """
    if vars is None:
        if main_program is None:
            main_program = default_main_program()
        if not isinstance(main_program, Program):
            raise TypeError("main_program should be an instance of Program or None")

        save_vars(
            executor,
            dirname=dirname,
            vars=filter(predicate, main_program.list_vars()),
            filename=filename)
    else:
        save_program = Program()
        save_block = save_program.global_block()

        save_var_map = {}
        for each_var in vars:
            # NOTE: don't save the variable which type is RAW
            if each_var.type == core.VarDesc.VarType.RAW:
                continue
            new_var = _clone_var_in_block_(save_block, each_var)
            if filename is None:
                save_block.append_op(
                    type='save',
                    inputs={'X': [new_var]},
                    outputs={},
                    attrs={'file_path': os.path.join(dirname, new_var.name)})
            else:
                save_var_map[new_var.name] = new_var

        if filename is not None:
            save_var_list = []
            for name in sorted(save_var_map.keys()):
                save_var_list.append(save_var_map[name])

            save_block.append_op(
                type='save_combine',
                inputs={'X': save_var_list},
                outputs={},
                attrs={'file_path': os.path.join(dirname, filename)})

        executor.run(save_program)


def save_params(executor, dirname, main_program=None, filename=None):
    """
    This function filters out all parameters from the given `main_program`
    and then saves them to the folder `dirname` or the file `filename`.

    Use the `dirname` to specify the saving folder. If you would like to
    save parameters in separate files, set `filename` to None; if you would
    like to save all parameters in a single file, use `filename` to specify
    the file name.

    NOTICE: Some variables are not Parameter while they are necessary for
    training. So you can NOT save and continue your training just by
    `save_params()` and `load_params()`. Please use `save_persistables()`
    and `load_persistables()` instead.

    Args:
        executor(Executor): The executor to run for saving parameters.
        dirname(str): The saving directory path.
        main_program(Program|None): The program whose parameters will be
                                    saved. If it is None, the default
                                    main program will be used automatically.
                                    Default: None
        filename(str|None): The file to save all parameters. If you prefer
                            to save parameters in different files, set it
                            to None.
                            Default: None

    Returns:
        None

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.save_params(executor=exe, dirname=param_path,
                                 main_program=None)
    """
    save_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        vars=None,
        predicate=is_parameter,
        filename=filename)


def save_persistables(executor, dirname, main_program=None, filename=None):
    """
    This function filters out all variables with `persistable==True` from the
    given `main_program` and then saves these variables to the folder `dirname`
    or the file `filename`.

    The `dirname` is used to specify the folder where persistable variables
    are going to be saved. If you would like to save variables in separate
    files, set `filename` to None; if you would like to save all variables in
    a single file, use `filename` to specify the file name.

    Args:
        executor(Executor): The executor to run for saving persistable variables.
        dirname(str): The directory path.
        main_program(Program|None): The program whose persistable variables will
                                    be saved. If it is None, the default main
                                    program will be used automatically.
                                    Default: None
        filename(str|None): The file to save all variables. If you prefer to
                            save variables in different files, set it to None.
                            Default: None

    Returns:
        None

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.save_persistables(executor=exe, dirname=param_path,
                                       main_program=None)
    """
    save_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        vars=None,
        predicate=is_persistable,
        filename=filename)


def load_vars(executor,
              dirname,
              main_program=None,
              vars=None,
              predicate=None,
              filename=None):
    """
    Load variables from the given directory with the executor.

    There are two ways to specify the variables to be loaded: the first way is
    to list them and assign the list to `vars`; the second way is to assign an
    existing program to `main_program`, in which case all variables in the
    program will be loaded. The first way takes priority: if `vars` is
    assigned, `main_program` and `predicate` are ignored.

    The `dirname` is used to specify the folder from which to load variables.
    If variables were saved in separate files in the folder `dirname`,
    set `filename` to None; if all variables were saved in a single file,
    use `filename` to specify it.

    Args:
        executor(Executor): The executor to run for loading variables.
        dirname(str): The directory path.
        main_program(Program|None): The program whose variables will be loaded.
                                    If it is None, the default main program will
                                    be used automatically.
                                    Default: None
        vars(list[Variable]|None): The list that contains all variables to load.
                                   It has a higher priority than the `main_program`.
                                   Default: None
        predicate(function|None): If it is not None, only variables in the
                                  `main_program` that make predicate(variable)==True
                                  will be loaded. It only works when we are using
                                  the `main_program` to specify variables (in
                                  other words, `vars` is None).
                                  Default: None
        filename(str|None): The file which saved all required variables. If
                            variables were saved in different files, set it
                            to None.
                            Default: None

    Returns:
        None

    Raises:
        TypeError: If `main_program` is not an instance of Program nor None.

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"

            # The first usage: using `main_program` to specify variables
            def name_has_fc(var):
                res = "fc" in var.name
                return res

            prog = fluid.default_main_program()
            fluid.io.load_vars(executor=exe, dirname=param_path, main_program=prog,
                               vars=None, predicate=name_has_fc)
            # All variables in `main_program` whose name includes "fc" will be loaded.
            # And all the variables are supposed to have been saved in different files.


            # The second usage: using `vars` to specify variables
            var_list = [var_a, var_b, var_c]
            fluid.io.load_vars(executor=exe, dirname=param_path, vars=var_list,
                               filename="vars_file")
            # var_a, var_b and var_c will be loaded. And they are supposed to have
            # been saved in the same file named 'vars_file' in the path "./my_paddle_model".
    """
    if vars is None:
        if main_program is None:
            main_program = default_main_program()
        if not isinstance(main_program, Program):
            raise TypeError("main_program should be an instance of Program or None")

        load_vars(
            executor,
            dirname=dirname,
            vars=filter(predicate, main_program.list_vars()),
            filename=filename)
    else:
        load_prog = Program()
        load_block = load_prog.global_block()

        load_var_map = {}
        for each_var in vars:
            assert isinstance(each_var, Variable)
            if each_var.type == core.VarDesc.VarType.RAW:
                continue
            new_var = _clone_var_in_block_(load_block, each_var)
            if filename is None:
                load_block.append_op(
                    type='load',
                    inputs={},
                    outputs={'Out': [new_var]},
                    attrs={'file_path': os.path.join(dirname, new_var.name)})
            else:
                load_var_map[new_var.name] = new_var

        if filename is not None:
            load_var_list = []
            for name in sorted(load_var_map.keys()):
                load_var_list.append(load_var_map[name])

            load_block.append_op(
                type='load_combine',
                inputs={},
                outputs={"Out": load_var_list},
                attrs={'file_path': os.path.join(dirname, filename)})

        executor.run(load_prog)


def load_params(executor, dirname, main_program=None, filename=None):
    """
    This function filters out all parameters from the given `main_program`
    and then tries to load these parameters from the folder `dirname` or
    the file `filename`.

    Use the `dirname` to specify the folder where parameters were saved. If
    parameters were saved in separate files in the folder `dirname`, set
    `filename` to None; if all parameters were saved in a single file, use
    `filename` to specify the file name.

    NOTICE: Some variables are not Parameter while they are necessary for
    training. So you can NOT save and continue your training just by
    `save_params()` and `load_params()`. Please use `save_persistables()`
    and `load_persistables()` instead.

    Args:
        executor(Executor): The executor to run for loading parameters.
        dirname(str): The directory path.
        main_program(Program|None): The program whose parameters will be
                                    loaded. If it is None, the default
                                    main program will be used automatically.
                                    Default: None
        filename(str|None): The file which saved all parameters. If parameters
                            were saved in different files, set it to None.
                            Default: None

    Returns:
        None

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.load_params(executor=exe, dirname=param_path,
                                 main_program=None)
    """
    load_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        predicate=is_parameter,
        filename=filename)


def load_persistables(executor, dirname, main_program=None, filename=None):
    """
    This function filters out all variables with `persistable==True` from the
    given `main_program` and then tries to load these variables from the folder
    `dirname` or the file `filename`.

    Use the `dirname` to specify the folder where persistable variables were
    saved. If variables were saved in separate files, set `filename` to None;
    if all variables were saved in a single file, use `filename` to specify
    the file name.

    Args:
        executor(Executor): The executor to run for loading persistable variables.
        dirname(str): The directory path.
        main_program(Program|None): The program whose persistable variables will
                                    be loaded. If it is None, the default main
                                    program will be used automatically.
                                    Default: None
        filename(str|None): The file which saved all variables. If variables were
                            saved in different files, set it to None.
                            Default: None

    Returns:
        None

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.load_persistables(executor=exe, dirname=param_path,
                                       main_program=None)
    """
    load_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        predicate=is_persistable,
        filename=filename)


def get_inference_program(target_vars, main_program=None):
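    """
    Prune the given `main_program` to keep only the operators needed to
    compute `target_vars` (for an Evaluator target, its states and metrics
    are used as targets), and return the inference-optimized result.

    Args:
        target_vars(Variable|Evaluator|list): The target variables or
            evaluators the inference program should compute.
        main_program(Program|None): The program to be pruned. If it is None,
                                    the default main program will be used.
                                    Default: None

    Returns:
        Program: The pruned and inference-optimized program.
    """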
    if main_program is None:
        main_program = default_main_program()
    if not isinstance(target_vars, list):
        target_vars = [target_vars]
    vars = []
    for var in target_vars:
        if isinstance(var, Evaluator):
            vars.extend(var.states)
            vars.extend(var.metrics)
        else:
            vars.append(var)
    pruned_program = main_program.prune(targets=vars)
    inference_program = pruned_program.inference_optimize()
    return inference_program


def prepend_feed_ops(inference_program,
                     feed_target_names,
                     feed_holder_name='feed'):
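    """
    Prepend one `feed` op per name in `feed_target_names` to the global block
    of `inference_program`; every feed op reads from a single feed holder
    variable named `feed_holder_name`.
    """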
    if len(feed_target_names) == 0:
        return

    global_block = inference_program.global_block()
    feed_var = global_block.create_var(
        name=feed_holder_name,
        type=core.VarDesc.VarType.FEED_MINIBATCH,
        persistable=True)

    for i, name in enumerate(feed_target_names):
        out = global_block.var(name)
        global_block.prepend_op(
            type='feed',
            inputs={'X': [feed_var]},
            outputs={'Out': [out]},
            attrs={'col': i})


def append_fetch_ops(inference_program,
                     fetch_target_names,
                     fetch_holder_name='fetch'):
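    """
    Append one `fetch` op per name in `fetch_target_names` to the global block
    of `inference_program`; every fetch op writes into a single fetch holder
    variable named `fetch_holder_name`.
    """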
    global_block = inference_program.global_block()
    fetch_var = global_block.create_var(
        name=fetch_holder_name,
        type=core.VarDesc.VarType.FETCH_LIST,
        persistable=True)

    for i, name in enumerate(fetch_target_names):
        global_block.append_op(
            type='fetch',
            inputs={'X': [name]},
            outputs={'Out': [fetch_var]},
            attrs={'col': i})


def save_inference_model(dirname,
                         feeded_var_names,
                         target_vars,
                         executor,
                         main_program=None,
                         model_filename=None,
                         params_filename=None):
    """
    Prune the given `main_program` to build a new program especially for inference,
    and then save it and all related parameters to the given `dirname` by the `executor`.

    Args:
        dirname(str): The directory path to save the inference model.
        feeded_var_names(list[str]): Names of variables that need to be fed data
                                     during inference.
        target_vars(list[Variable]): Variables from which we can get inference
                                     results.
        executor(Executor): The executor that saves the inference model.
        main_program(Program|None): The original program, which will be pruned to
                                    build the inference model. If it is set to
                                    None, the default main program will be used.
                                    Default: None.
        model_filename(str|None): The name of the file to save the inference
                                  program itself. If it is set to None, a default
                                  filename `__model__` will be used.
        params_filename(str|None): The name of the file to save all related
                                   parameters. If it is set to None, parameters
                                   will be saved in separate files.

    Returns:
        None

    Raises:
        ValueError: If `feeded_var_names` is not a list of basestring.
        ValueError: If `target_vars` is not a list of Variable.

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            path = "./infer_model"
            fluid.io.save_inference_model(dirname=path, feeded_var_names=['img'],
                         target_vars=[predict_var], executor=exe)

            # In this example, the function will prune the default main program
            # to make it suitable for inferring the `predict_var`. The pruned
            # inference program is going to be saved in the "./infer_model/__model__"
            # and parameters are going to be saved in separate files under the
            # folder "./infer_model".

    """
    if isinstance(feeded_var_names, basestring):
        feeded_var_names = [feeded_var_names]
    else:
        if len(feeded_var_names) > 0:
            if not (bool(feeded_var_names) and all(
                    isinstance(name, basestring) for name in feeded_var_names)):
                raise ValueError("'feeded_var_names' should be a list of str.")

    if isinstance(target_vars, Variable):
        target_vars = [target_vars]
    else:
        if not (bool(target_vars) and all(
                isinstance(var, Variable) for var in target_vars)):
            raise ValueError("'target_vars' should be a list of Variable.")

    if main_program is None:
        main_program = default_main_program()
    copy_program = main_program.clone()

    if not os.path.isdir(dirname):
        os.makedirs(dirname)

    # Clear the is_target information and remove the existing feed and fetch ops.
    # Iterate in reverse so that removing an op does not shift the indices of
    # the ops that have not been visited yet.
    global_block = copy_program.global_block()
    for i in range(len(global_block.ops) - 1, -1, -1):
        op = global_block.ops[i]
        op.desc.set_is_target(False)
        if op.type == "feed" or op.type == "fetch":
            global_block.remove_op(i)
    copy_program.desc.flush()

    pruned_program = copy_program.prune(targets=target_vars)
    inference_program = pruned_program.inference_optimize()
    fetch_var_names = [v.name for v in target_vars]
    fetch_var_names = [v.name for v in target_vars]

    prepend_feed_ops(inference_program, feeded_var_names)
    append_fetch_ops(inference_program, fetch_var_names)

    if model_filename is not None:
        model_filename = os.path.basename(model_filename)
    else:
        model_filename = "__model__"
    model_filename = os.path.join(dirname, model_filename)

    if params_filename is not None:
        params_filename = os.path.basename(params_filename)

    with open(model_filename, "wb") as f:
        f.write(inference_program.desc.serialize_to_string())

    save_persistables(executor, dirname, inference_program, params_filename)


def load_inference_model(dirname,
                         executor,
                         model_filename=None,
                         params_filename=None):
    """
    Load the inference model from a given directory.

    Args:
        dirname(str): The directory path.
        executor(Executor): The executor to run for loading the inference model.
        model_filename(str|None): The name of the file to load the inference
                                  program. If it is None, the default filename
                                  '__model__' will be used.
                                  Default: None
        params_filename(str|None): The name of the file to load all parameters.
                                   It is only used for the case that all
                                   parameters were saved in a single binary
                                   file. If parameters were saved in separate
                                   files, set it as 'None'.

    Returns:
        tuple: The return of this function is a tuple with three elements:
        (program, feed_target_names, fetch_targets). The `program` is a
        Program, it's the program for inference. The `feed_target_names` is
        a list of str, it contains the names of variables that need to feed
        data in the inference program. The `fetch_targets` is a list of
        Variable. It contains variables from which we can get inference
        results.

    Raises:
        ValueError: If `dirname` is not an existing directory.

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            path = "./infer_model"
            [inference_program, feed_target_names, fetch_targets] = \
                fluid.io.load_inference_model(dirname=path, executor=exe)
            results = exe.run(inference_program,
                          feed={feed_target_names[0]: tensor_img},
                          fetch_list=fetch_targets)

            # In this example, the inference program was saved in the
            # "./infer_model/__model__" and parameters were saved in
            # separate files in "./infer_model".
            # After getting the inference program, feed target names and
            # fetch targets, we can use an Executor to run the inference
            # program to get the inference result.

    """
    if not os.path.isdir(dirname):
        raise ValueError("There is no directory named '%s'", dirname)

    if model_filename is not None:
        model_filename = os.path.basename(model_filename)
    else:
        model_filename = "__model__"
    model_filename = os.path.join(dirname, model_filename)

    if params_filename is not None:
        params_filename = os.path.basename(params_filename)

    with open(model_filename, "rb") as f:
        program_desc_str = f.read()

    program = Program.parse_from_string(program_desc_str)
    load_persistables(executor, dirname, program, params_filename)

    feed_target_names = program.desc.get_feed_target_names()
    fetch_target_names = program.desc.get_fetch_target_names()
    fetch_targets = [
        program.global_block().var(name) for name in fetch_target_names
    ]

    return [program, feed_target_names, fetch_targets]


def get_parameter_value(para, executor):
    """
    Get the LoDTensor value of the given parameter.

    Args:
        para(Parameter): The parameter to get value from.
        executor(Executor): The executor to run for retrieving the value.

    Returns:
        numpy.array: The given parameter's values.

    Raises:
        AssertionError: If the `para` is not an instance of Parameter.

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param = fluid.default_main_program().global_block().var('fc.w')
            p = fluid.io.get_parameter_value(param, exe)

    """
    assert is_parameter(para)

    get_program = Program()
    block = get_program.global_block()
    new_var = _clone_var_in_block_(block, para)
    return executor.run(get_program, feed={}, fetch_list=[new_var])[0]


def get_parameter_value_by_name(name, executor, program=None):
    """
    Get the LoDTensor value of a certain parameter by its name.

    Args:
        name(str): The parameter's name.
        executor(Executor): The executor to run for retrieving the value.
        program(Program|None): The program in which to find the parameter.
                               If it is set to None, the function will
                               try to find the parameter in the default
                               main program.

    Returns:
        numpy.array: The parameter's values.

    Raises:
        TypeError: If given `name` is not an instance of basestring.
        TypeError: If the parameter with the given name doesn't exist.
        AssertionError: If there is a variable named `name` in the
                        given program but it is not a Parameter.

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            p = fluid.io.get_parameter_value_by_name('fc.w', exe)
    """
    if program is None:
        program = default_main_program()
    var = program.global_block().var(name)
    return get_parameter_value(var, executor)


SUCCESS_MARK_FILENAME = "_SUCCESS"
CHECKPOINT_PREFIX = "checkpoint"
MODEL_DIR = "__model__"
LOOKUP_TABLE_DIR = "__lookup_table__"
TRAINER_PREFIX = "trainer"
CHECKPOINT_SEPARATOR = "_"


def save_checkpoint(executor,
                    checkpoint_dir,
                    trainer_id,
                    trainer_args=None,
                    main_program=None,
                    max_num_checkpoints=3,
                    lookup_table=None,
                    ps_endpoint_list=None):
    """
    This function filters out all checkpoint variables from the given
    main_program and then saves these variables to the `checkpoint_dir`
    directory.

    In the training process, we generally save a checkpoint in each
    iteration. So there might be a lot of checkpoints in the
    `checkpoint_dir`. To avoid them taking too much disk space, the
    `max_num_checkpoints` is introduced to limit the total number of
    checkpoints. If the number of existing checkpoints is greater than
    `max_num_checkpoints`, the oldest ones will be deleted.

    A variable is a checkpoint variable and will be saved if it meets
    all following conditions:
        1. It's persistable.
        2. Its type is not FEED_MINIBATCH nor FETCH_LIST nor RAW.
        3. Its name contains no "@GRAD" nor ".trainer_" nor ".block".

    Args:
        executor(Executor): The executor to run for saving the checkpoint.
        checkpoint_dir(str): The folder where to save checkpoints.
        trainer_id(int): current trainer id; if the id is equal to 0, the
            trainer is chief.
        trainer_args(dict|None): Current training arguments, such as 'epoch_id'
            and 'step_id'.
            Default: None
        main_program(Program|None): The program whose checkpoint variables will
            be saved. If it is None, the default main program will be used.
        max_num_checkpoints(int): The maximum number of existing checkpoints
            to keep.
            Default: 3
        lookup_table(string|None): the lookup table name. When using a
            distributed lookup table, we can get the lookup table name from
            DistributeTranspiler.table_name.
        ps_endpoint_list(list|None): the parameter server ip:port list.
            When using a distributed lookup table, we can get the
            ps_endpoint_list from the distribute arguments.

    Returns:
        None

    Raises:
        ValueError: If `checkpoint_dir` is None.
        AssertionError: If `trainer_args` is not a dict.

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            path = "./checkpoints"
            prog = fluid.default_main_program()
            trainer_args = {"epoch_id": 200,
                            "step_id": 20} # just an example
            table_name = "share_w"
            ps_endpoints = ["127.0.0.1:6000","127.0.0.1:6001"]

            fluid.io.save_checkpoint(executor=exe,
                                     checkpoint_dir=path,
                                     trainer_id=0,
                                     trainer_args=trainer_args,
                                     main_program=prog,
                                     max_num_checkpoints=3,
                                     lookup_table=table_name,
                                     ps_endpoint_list=ps_endpoints)
    """
    if checkpoint_dir is None:
        raise ValueError("'checkpoint_dir' should not be None")
    assert checkpoint_dir

    if trainer_args:
        assert isinstance(trainer_args, dict)

    is_chief = trainer_id == 0

    _make_checkpoint_dirs(checkpoint_dir)
    serial = get_latest_checkpoint_serial(checkpoint_dir) + 1
    cur_dir = _get_serial_dir(checkpoint_dir, serial)

    save_trainer_args(cur_dir, trainer_id, trainer_args)

    if is_chief:
        save_persist_vars_without_grad(executor, cur_dir, main_program)

    if is_chief and lookup_table and ps_endpoint_list:
        save_pserver_vars_by_notify(executor, cur_dir, lookup_table,
                                    ps_endpoint_list)

    _scroll_delete(checkpoint_dir, max_num_checkpoints)


def load_checkpoint(executor, checkpoint_dir, serial, main_program):
    """
    This function filters out all checkpoint variables from the given
    main_program and then tries to load these variables from the
    `checkpoint_dir` directory.

    In the training process, we generally save a checkpoint in each
    iteration. So there may be more than one checkpoint in the
    `checkpoint_dir` (each checkpoint has its own sub folder); use
    `serial` to specify which serial of checkpoint you would like to
    load.

    A variable is a checkpoint variable and will be loaded if it meets
    all following conditions:
        1. It's persistable.
        2. Its type is not FEED_MINIBATCH nor FETCH_LIST nor RAW.
        3. Its name contains no "@GRAD" nor ".trainer_" nor ".block".

    Args:
        executor(Executor): The executor to run for loading the checkpoint.
        checkpoint_dir(str): The folder where all checkpoints are.
        serial(int): The serial of the checkpoint you would like to load.
        main_program(Program): The program whose checkpoint variables will
                               be loaded.

    Returns:
        None

    Raises:
        ValueError: If `checkpoint_dir` is None.
        ValueError: If `serial` is None or `serial` is less than 0.
        ValueError: If `main_program` is None.

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            path = "./checkpoints"
            prog = fluid.default_main_program()
            fluid.io.load_checkpoint(executor=exe, checkpoint_dir=path,
                    serial=9, main_program=prog)

            # In this example, the `load_checkpoint` function
            # will first filter out all checkpoint variables in the default
            # main program, and then try to load these variables from the
            # folder "./checkpoints/checkpoint_9/__model__".
    """

    if checkpoint_dir is None:
        raise ValueError("'checkpoint_dir' should not be None")

    if serial is None or serial < 0:
        raise ValueError("'serial' should not be None or less than 0")

    if main_program is None:
        raise ValueError("'main_program' should not be None")

    cur_dir = _get_serial_dir(checkpoint_dir, serial)
    load_persist_vars_without_grad(executor, cur_dir, main_program, True)


def clean_checkpoint(checkpoint_dir, delete_dir=False):
    """
    Clean the checkpoint dir: when the train exits normally,
    the trainer will call clean_checkpoint to delete the checkpoint
    directories saved before.
    delete_dir only takes effect when the directory is empty after cleaning;
    a non-empty directory is left in place.

    Args:
        checkpoint_dir(str): The folder where all checkpoints are.
        delete_dir(bool): Whether to delete the (empty) checkpoint directory
                          itself after cleaning.
                          Default: False
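    Examples:
        .. code-block:: python

            # A minimal sketch: delete all checkpoints saved under
            # "./checkpoints" and remove the directory itself if it is
            # left empty.
            fluid.io.clean_checkpoint(checkpoint_dir="./checkpoints",
                                      delete_dir=True)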
    """

    if checkpoint_dir is None:
        raise ValueError("'checkpoint_dir' should not be None")
    _scroll_delete(checkpoint_dir, max_num_checkpoints=0)

    if delete_dir and not os.listdir(checkpoint_dir):
        os.rmdir(checkpoint_dir)


def load_persist_vars_without_grad(executor,
                                   dirname,
                                   program,
                                   has_model_dir=False):
    """
    This function filters out all checkpoint variables from the given
    program and then tries to load these variables from the given directory.

    A variable is a checkpoint variable if it meets all following
    conditions:
        1. It's persistable.
        2. Its type is not FEED_MINIBATCH nor FETCH_LIST nor RAW.
        3. Its name contains no "@GRAD" nor ".trainer_" nor ".block".

    Args:
        executor(Executor): The executor to run for loading variables.
        dirname(str): The directory path.
        program(Program): The program whose checkpoint variables will
                          be loaded.
        has_model_dir(bool): if True, the function loads variables
                             from a sub directory named '__model__'.
                             Default: False

    Returns:
        None

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.load_persist_vars_without_grad(executor=exe,
                    dirname=param_path, program=prog, has_model_dir=True)

            # In this example, the `load_persist_vars_without_grad` function
            # will first filter out all checkpoint variables in the default
            # main program, and then try to load these variables from the
            # folder "./my_paddle_model/__model__".
    """

    if has_model_dir:
        dirname = _get_model_dir(dirname)

    load_vars(
        executor,
        dirname=dirname,
        main_program=program,
        predicate=_is_checkpoint_var,
        filename=None)


def load_lookup_table_vars(executor, dirname, program, pserver_id, table_name):
    """
    The parameter server will load lookup table's local file in 
    selectedrows variable.

    Args:
        executor(Executor): The executor to run for loading persistable variables
        dirname(str): The directory path
        main_program(Program): Find the variable named table_name in main_program
        pserver_id(int): the serial number in pserver_endpoints list
        table_name(str): lookup table name

    Returns:
        None

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            dirname = "./checkpoints/checkpoint_9/__model__"
            prog = fluid.default_main_program()
            pserver_id = 1
            table_name = "share_w"
            fluid.io.load_lookup_table_vars(executor=exe,
                    dirname=dirname, program=prog, pserver_id=pserver_id,
                    table_name=table_name)
    """

    # Initialize first so the assert below fails cleanly instead of raising
    # a NameError when no variable named `table_name` exists in the program.
    lookup_table_var = None
    for var in program.list_vars():
        if var.name == table_name:
            lookup_table_var = var
            break

    assert lookup_table_var is not None

    lookup_table_dir = os.path.join(dirname, LOOKUP_TABLE_DIR)
    table_file = table_name + CHECKPOINT_SEPARATOR + str(pserver_id)

    load_prog = Program()
    load_block = load_prog.global_block()

    load_block.append_op(
        type='load',
        inputs={},
        outputs={'Out': [lookup_table_var]},
        attrs={'file_path': os.path.join(lookup_table_dir, table_file)})

    executor.run(load_prog)


def save_persist_vars_without_grad(executor, dirname, program):
    """
    This function filters out all checkpoint variables from the given
    program and then saves these variables to a sub-folder '__model__' of
    the given directory.

    A variable is a checkpoint variable if it meets all following
    conditions:
        1. It's persistable.
        2. Its type is not FEED_MINIBATCH nor FETCH_LIST nor RAW.
        3. Its name contains no "@GRAD" nor ".trainer_" nor ".block".

    Args:
        executor(Executor): The executor to run for saving variables.
        dirname(str): The directory path.
        program(Program): The program whose checkpoint variables will
                          be saved.

    Returns:
        None

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            fluid.io.save_persist_vars_without_grad(executor=exe,
                    dirname=param_path, program=prog)

            # In this example, the `save_persist_vars_without_grad` function
            # will first filter out all checkpoint variables in the default
            # main program, and then save these variables to the folder
            # "./my_paddle_model/__model__".
    """
    cur_dir = _get_model_dir(dirname)
    save_vars(
        executor,
        dirname=cur_dir,
        main_program=program,
        vars=None,
        predicate=_is_checkpoint_var,
        filename=None)
    _write_success(cur_dir)


def save_pserver_vars_by_notify(executor, dirname, lookup_table,
                                ps_endpoint_list):
    """
    This function will send a checkpoint notify message from Trainer 0
    to all the pservers.
    The checkpoint notify message contains the lookup table name and
    the absolute path on the pserver where the lookup_table is saved.

    Args:
        executor(Executor): The executor to run for sending the checkpoint notify.
        dirname(str): The folder where to save checkpoints.
        lookup_table(string): the lookup table name. When using a distributed
            lookup table, we can get the lookup table name from
            DistributeTranspiler.table_name.
        ps_endpoint_list(list): the parameter server ip:port list.
            When using a distributed lookup table, we can get the
            ps_endpoint_list from the distribute arguments.

    Returns:
        None

    Examples:
        .. code-block:: python

            exe = fluid.Executor(fluid.CPUPlace())
            param_path = "./my_paddle_model"
            prog = fluid.default_main_program()
            table_name = "share_w"
            ps_endpoints = ["127.0.0.1:6000","127.0.0.1:6001"]

            fluid.io.save_pserver_vars_by_notify(executor=exe,
                    dirname=param_path, lookup_table=table_name,
                    ps_endpoint_list=ps_endpoints)
    """
    cur_dir = _get_lookuptable_dir(dirname)

    checkpoint_notify_program = Program()
    checkpoint_notify_block = checkpoint_notify_program.global_block()

    attrs = {}
    attrs['epmap'] = ps_endpoint_list
    attrs['dir'] = cur_dir
    attrs['lookup_table'] = lookup_table

    checkpoint_notify_block.append_op(
        type='checkpoint_notify', inputs={}, outputs={}, attrs=attrs)
    executor.run(checkpoint_notify_program)


def save_trainer_args(dirname, trainer_id, trainer_args):
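    """
    Save each entry of `trainer_args` (e.g. 'epoch_id' and 'step_id') as a
    small text file named after the key, under this trainer's own
    sub-directory of the checkpoint directory, and then write the _SUCCESS
    mark.
    """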
    assert isinstance(trainer_args, dict)

    cur_dir = _get_trainer_dir(dirname, trainer_id)

    for name, value in trainer_args.iteritems():
        args_file = os.path.join(cur_dir, name)
        with open(args_file, 'w') as f:
            f.write(str(value))
    _write_success(cur_dir)


def load_trainer_args(checkpoint_dir, serial, trainer_id, trainer_args):
    """
    The trainer will load some args from its independent directory,
    such as epoch_id and step_id.

    Args:
        checkpoint_dir(str): The folder where all checkpoints are.
        serial(int): The serial of the checkpoint you would like to load.
        trainer_id(int): current trainer id.
        trainer_args(list): the names of the trainer args to load.
    Returns:
        list: the loaded values, in the same order as `trainer_args`.

    Examples:
        .. code-block:: python

            param_path = "./checkpoint/"
            serial = 7
            trainer_id = 2
            trainer_args = ["epoch_id", "step_id"]

            fluid.io.load_trainer_args(checkpoint_dir=param_path, serial=serial,
            trainer_id=trainer_id, trainer_args=trainer_args)
    """
    assert isinstance(trainer_args, list)

    cur_dir = _get_serial_dir(checkpoint_dir, serial)
    cur_dir = _get_trainer_dir(cur_dir, trainer_id)

    ret_values = []

    for arg in trainer_args:
        cur_file = os.path.join(cur_dir, arg)
        with open(cur_file, 'r') as f:
            contents = f.read()
            ret_values.append(contents.strip())
    return ret_values


def _is_checkpoint_var(var):
    """
    The checkpoint will not save or load all the variables.
    Variables whose type is FEED_MINIBATCH/FETCH_LIST/RAW, or whose name
    contains "@GRAD", ".trainer_" or ".block", are discarded.

    Args:
        var(Variable): The variable to be checked.
    """
    if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
            var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
            var.desc.type() == core.VarDesc.VarType.RAW:
        return False
    # Variables named with @GRAD are gradient variables; the checkpoint will not save them.
    if "@GRAD" in var.name:
        return False
    # Variables named with .trainer_ are distributed-training variables; the checkpoint will not save them.
    if ".trainer_" in var.name:
        return False

    # Variables named with .block are distributed-training variables; the checkpoint will not save them.
    if ".block" in var.name:
        return False

    return var.persistable


def _make_checkpoint_dirs(dirs):
    """
    _make_chekcpoint_dirs will makdir local directory directly, when the directory is exist, it will igore it.
    """
    assert dirs is not None

    if os.path.isfile(dirs):
        raise OSError(errno.ENOTDIR, "dirs path should be a directory.", dirs)

    if not os.path.isdir(dirs):
        try:
            os.makedirs(dirs)
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise err


def _get_dir_serial(dirname):
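    # A checkpoint directory is named like "checkpoint_5"; a name that does
    # not carry a valid integer serial yields -1.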
    try:
        _, serial = dirname.rsplit(CHECKPOINT_SEPARATOR, 1)
        serial_num = int(serial)
    except ValueError:
        serial_num = -1
    return serial_num


def _get_serial_dir(dirname, serial):
    serial_folder = CHECKPOINT_PREFIX + CHECKPOINT_SEPARATOR + str(serial)
T
tangwei12 已提交
1287
    serial_dir = os.path.join(dirname, serial_folder)
    _make_chekcpoint_dirs(serial_dir)

    return serial_dir


def _get_model_dir(dirname):
    model_dir = os.path.join(dirname, MODEL_DIR)
    _make_chekcpoint_dirs(model_dir)
    return model_dir


def _get_lookuptable_dir(dirname):
    lookuptable_dir = os.path.join(dirname, LOOKUP_TABLE_DIR)
    _make_chekcpoint_dirs(lookuptable_dir)
    return lookuptable_dir


def _get_trainer_dir(dirname, trainer_id):
    trainer_folder = TRAINER_PREFIX + CHECKPOINT_SEPARATOR + str(trainer_id)
    trainer_dir = os.path.join(dirname, trainer_folder)
    _make_chekcpoint_dirs(trainer_dir)
    return trainer_dir
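
# Putting the helpers above together, a checkpoint tree looks roughly
# like the sketch below (the literal folder names depend on the
# CHECKPOINT_PREFIX, TRAINER_PREFIX, MODEL_DIR and CHECKPOINT_SEPARATOR
# constants defined earlier in this module):
#
#     <checkpoint_dir>/
#         <CHECKPOINT_PREFIX>_<serial>/
#             <MODEL_DIR>/            # persistable variables, plus _SUCCESS
#             <TRAINER_PREFIX>_<id>/  # per-trainer args such as epoch_id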


def _scroll_delete(dirname, max_num_checkpoints=3):
    """
    Keep only the newest `max_num_checkpoints` checkpoints under
    `dirname` and delete all older ones.
    """
    dirs = os.listdir(dirname)
    serial_map = {}
    for serial in dirs:
        serial_num = _get_dir_serial(serial)
        serial_map[serial_num] = serial

    if len(serial_map) <= max_num_checkpoints:
        return

    # Sort serials from newest to oldest, keep the first
    # max_num_checkpoints of them and remove the rest.
    serials = sorted(serial_map.keys(), reverse=True)
    serials = serials[max_num_checkpoints:]
    for serial in serials:
        cur_dir = _get_serial_dir(dirname, serial)
        try:
            shutil.rmtree(cur_dir)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise err
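
# A worked example of the policy above: with max_num_checkpoints=3 and
# existing serials [9, 8, 7, 6, 5], the directories for serials 6 and 5
# are removed, while 9, 8 and 7 survive.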


def _write_success(dirname):
    """
    Write a file named "_SUCCESS" (containing the current time) in the
    checkpoint directory to indicate that this checkpoint is complete.

    :param dirname: the checkpoint directory to mark as complete.
    """
    success_file = os.path.join(dirname, SUCCESS_MARK_FILENAME)
    with open(success_file, 'a') as f:
        now = time.ctime()
        f.write(now)


def get_latest_checkpoint_serial(checkpoint_dir):
    """
    Get the serial number of the latest checkpoint in the checkpoint
    directory; a checkpoint only counts if its _SUCCESS file exists.

    :param checkpoint_dir: the directory that holds all checkpoints.
    :return: int, the latest valid serial number, or -1 if none exists.
    """
    if not checkpoint_dir:
        return -1

    def has_success(checkpoint_dir, cur_dir):
        """
        Return the serial of cur_dir if it is a valid checkpoint
        directory containing a _SUCCESS file, otherwise -1.
        """
        serial = _get_dir_serial(cur_dir)
        if serial == -1 or not os.path.isdir(
                os.path.join(checkpoint_dir, cur_dir)):
            return -1

        success_path = os.path.join(
            _get_serial_dir(checkpoint_dir, serial), MODEL_DIR,
            SUCCESS_MARK_FILENAME)
        if os.path.isfile(success_path):
            return serial
        return -1

    if not os.path.isdir(checkpoint_dir):
        return -1

    current_dir = -1
    dirs = os.listdir(checkpoint_dir)
    for cur_dir in dirs:
        success_num = has_success(checkpoint_dir, cur_dir)
        if success_num > current_dir:
            current_dir = success_num
    return current_dir
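
# A minimal usage sketch (the path is hypothetical): probe for the
# newest complete checkpoint before deciding whether to resume.
#
#     serial = fluid.io.get_latest_checkpoint_serial("./checkpoint/")
#     if serial == -1:
#         print("no complete checkpoint found, training from scratch")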


def get_test_program(filelist, program=None, startup_program=None):
    """
    Transpile current train program to a program to read test dataset
    if the program is using reader ops like "open_files_op".
    """

    def _copy_reader_var_(block, var, new_name=None):
        if new_name is None:
            new_name = var.name
        new_var = block.create_var(
            name=str(new_name), type=core.VarDesc.VarType.READER)
        new_var.desc.set_shapes(var.desc.shapes())
        new_var.desc.set_dtypes(var.desc.dtypes())
        new_var.persistable = True
        return new_var

    def _get_test_reader_name(train_reader_name):
        return train_reader_name + "_test"

    def _is_reader_op(op):
        block = op.block
        if "Out" in op.output_names:
            reader_out = block.vars[op.output("Out")[0]]
            if reader_out.type == core.VarDesc.VarType.READER:
                return True
        return False

    if program is None:
        program = default_main_program()
    if startup_program is None:
        startup_program = default_startup_program()
    startup_block = startup_program.global_block()

    # 1. find out the original reader var names
    startup_reader_op_list = []

    for op in startup_block.ops:
        if _is_reader_op(op):
            startup_reader_op_list.append(op)

    if len(startup_reader_op_list) == 0:
        return program

    root_reader_op = startup_reader_op_list[0]
    train_test_reader_map = {}
    # 2. add operators to the startup program to open and read the test data files
    for op in startup_reader_op_list:
        assert (len(op.output("Out")) == 1)
        train_reader_name = op.output("Out")[0]
        train_reader = startup_block.vars[train_reader_name]
        test_reader = _copy_reader_var_(
            startup_block,
            train_reader,
            new_name=_get_test_reader_name(train_reader_name))
        train_test_reader_map[train_reader.name] = test_reader

        test_op_inputs = {}
        for name in op.input_names:
            train_arg_names = op.input(name)
            test_arg_vars = []
            for arg_name in train_arg_names:
                arg_var = train_test_reader_map[
                    arg_name] if name == "UnderlyingReader" else startup_block.vars[
                        arg_name]
                test_arg_vars.append(arg_var)
            test_op_inputs[name] = test_arg_vars

        test_op = startup_block.append_op(
            type=op.type,
            inputs=test_op_inputs,
            outputs={'Out': [test_reader]},
            attrs=op.attrs)
        # set the root reader op's file_names attr so it reads the test files
        if op.type == root_reader_op.type:
            test_op.set_attr("file_names", filelist)
        if op.type == "create_multi_pass_reader":
            test_op.set_attr("pass_num", 1)

    # 3. rename the reader vars in the main program to different names
    #    to avoid reading from the train data.
    main_block = program.global_block()
    for var in main_block.vars.values():
        if var.type == core.VarDesc.VarType.READER:
            main_block.rename_var(
                str(var.name), str(_get_test_reader_name(var.name)))

    for op in main_block.ops:
        # these attrs must be set on the main program's own reader ops,
        # not on the startup-block test_op created in the loop above
        if op.type == root_reader_op.type:
            op.set_attr("file_names", filelist)
        if op.type == "create_multi_pass_reader":
            op.set_attr("pass_num", 1)

    startup_program.sync_with_cpp()
    program.sync_with_cpp()

    return program
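
# A brief usage sketch (file names are hypothetical). When no programs
# are passed in, the default main and startup programs are transpiled in
# place, and the main program is also returned:
#
#     test_program = fluid.io.get_test_program(
#         ["test_part-00000", "test_part-00001"])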