#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import time
import shutil

from paddle.fluid.evaluator import Evaluator
from paddle.fluid.framework import Program, Parameter, default_main_program, Variable
from . import core

__all__ = [
    'save_vars', 'save_params', 'save_persistables', 'load_vars', 'load_params',
    'load_persistables', 'save_inference_model', 'load_inference_model',
    'get_inference_program', 'save_checkpoint', 'load_checkpoint',
    'clean_checkpoint'
]


def is_parameter(var):
    """Check whether the variable is a Parameter.

    This function checks whether the input variable is a Parameter.

    Args:
        var : The input variable.

    Returns:
        bool: True if the variable is a Parameter, False otherwise.
    """
    return isinstance(var, Parameter)


def is_persistable(var):
    """Check whether the variable is persistable (excluding feed/fetch vars)."""
    if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
            var.desc.type() == core.VarDesc.VarType.FETCH_LIST:
        return False
    return var.persistable


def _clone_var_in_block_(block, var):
    assert isinstance(var, Variable)
    return block.create_var(
        name=var.name,
        shape=var.shape,
        dtype=var.dtype,
        type=var.type,
        lod_level=var.lod_level,
        persistable=True)


def save_vars(executor,
              dirname,
              main_program=None,
              vars=None,
              predicate=None,
              filename=None):
    """
    Save variables to a directory by executor.

    :param executor: executor that saves the variables
    :param dirname: directory path
    :param main_program: program. If vars is None, all variables in this
        program that fit `predicate` will be saved. Default: default_main_program().
    :param predicate: a callable that takes a variable and returns a bool.
        If it returns True, the corresponding variable will be saved.
    :param vars: variables that need to be saved. If vars is specified,
        main_program and predicate will be ignored.
    :param filename: the name of a single file that all vars are saved to.
        If it is None, save variables to separate files.

    :return: None
    """
    if vars is None:
        if main_program is None:
            main_program = default_main_program()
        if not isinstance(main_program, Program):
            raise TypeError("main_program should be of type Program or None")

        save_vars(
            executor,
            dirname=dirname,
            vars=filter(predicate, main_program.list_vars()),
            filename=filename)
    else:
        save_program = Program()
        save_block = save_program.global_block()

        save_var_map = {}
        for each_var in vars:
            # NOTE: don't save variables whose type is RAW
            if each_var.type == core.VarDesc.VarType.RAW:
                continue
            new_var = _clone_var_in_block_(save_block, each_var)
            if filename is None:
                save_block.append_op(
                    type='save',
                    inputs={'X': [new_var]},
                    outputs={},
                    attrs={'file_path': os.path.join(dirname, new_var.name)})
            else:
                save_var_map[new_var.name] = new_var

        if filename is not None:
            save_var_list = []
            for name in sorted(save_var_map.keys()):
                save_var_list.append(save_var_map[name])

            save_block.append_op(
                type='save_combine',
                inputs={'X': save_var_list},
                outputs={},
                attrs={'file_path': os.path.join(dirname, filename)})

        executor.run(save_program)
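
# A minimal usage sketch for save_vars (illustrative only; `exe` and a trained
# default_main_program() are assumed to exist in the caller's script):
#
#     import paddle.fluid as fluid
#
#     exe = fluid.Executor(fluid.CPUPlace())
#     # Save every parameter of the default program into ./my_model,
#     # one file per variable.
#     fluid.io.save_vars(
#         exe, dirname="./my_model", predicate=fluid.io.is_parameter)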


def save_params(executor, dirname, main_program=None, filename=None):
    """
    Save all parameters to a directory with executor.
    """
    save_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        vars=None,
        predicate=is_parameter,
        filename=filename)


def save_persistables(executor, dirname, main_program=None, filename=None):
    """
    Save all persistable variables to a directory with executor.
    """
    save_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        vars=None,
        predicate=is_persistable,
        filename=filename)
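
# A sketch of the two common save calls (assumes `exe` is an Executor and the
# default program holds the trained weights):
#
#     # One file per parameter under ./params:
#     save_params(exe, dirname="./params")
#     # Parameters plus other persistables (e.g. optimizer state), combined
#     # into a single binary file:
#     save_persistables(exe, dirname="./ckpt", filename="all_persistables")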


def load_vars(executor,
              dirname,
              main_program=None,
              vars=None,
              predicate=None,
              filename=None):
    """
    Load variables from a directory by executor.

    :param executor: executor that loads the variables
    :param dirname: directory path
    :param main_program: program. If vars is None, all variables in this
        program that fit `predicate` will be loaded. Default: default_main_program().
    :param predicate: a callable that takes a variable and returns a bool.
        If it returns True, the corresponding variable will be loaded.
    :param vars: variables that need to be loaded. If vars is specified,
        main_program and predicate will be ignored.
    :param filename: the name of the single file that all vars are loaded from.
        If it is None, load variables from separate files.

    :return: None
    """
    if vars is None:
        if main_program is None:
            main_program = default_main_program()
        if not isinstance(main_program, Program):
            raise TypeError("main_program's type should be Program")

        load_vars(
            executor,
            dirname=dirname,
            vars=filter(predicate, main_program.list_vars()),
            filename=filename)
    else:
        load_prog = Program()
        load_block = load_prog.global_block()

        load_var_map = {}
        for each_var in vars:
            assert isinstance(each_var, Variable)
            if each_var.type == core.VarDesc.VarType.RAW:
                continue
            new_var = _clone_var_in_block_(load_block, each_var)
            if filename is None:
                load_block.append_op(
                    type='load',
                    inputs={},
                    outputs={'Out': [new_var]},
                    attrs={'file_path': os.path.join(dirname, new_var.name)})
            else:
                load_var_map[new_var.name] = new_var

        if filename is not None:
            load_var_list = []
            for name in sorted(load_var_map.keys()):
                load_var_list.append(load_var_map[name])

            load_block.append_op(
                type='load_combine',
                inputs={},
                outputs={"Out": load_var_list},
                attrs={'file_path': os.path.join(dirname, filename)})

        executor.run(load_prog)
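
# A minimal sketch mirroring the save_vars example above (assumes the program
# that defined the saved variables has been rebuilt in this process):
#
#     load_vars(exe, dirname="./my_model", predicate=is_parameter)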


def load_params(executor, dirname, main_program=None, filename=None):
    """
    Load all parameters from a directory by executor.
    """
    load_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        predicate=is_parameter,
        filename=filename)


def load_persistables(executor, dirname, main_program=None, filename=None):
    """
    Load all persistable variables from a directory by executor.
    """
    load_vars(
        executor,
        dirname=dirname,
        main_program=main_program,
        predicate=is_persistable,
        filename=filename)
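
# The loading counterparts of the save calls sketched above (the files must
# have been produced by the matching save_* call with the same program):
#
#     load_params(exe, dirname="./params")
#     load_persistables(exe, dirname="./ckpt", filename="all_persistables")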


def get_inference_program(target_vars, main_program=None):
    if main_program is None:
        main_program = default_main_program()
    if not isinstance(target_vars, list):
        target_vars = [target_vars]
    vars = []
    for var in target_vars:
        if isinstance(var, Evaluator):
            vars.extend(var.states)
            vars.extend(var.metrics)
        else:
            vars.append(var)
    pruned_program = main_program.prune(targets=vars)
    inference_program = pruned_program.inference_optimize()
    return inference_program
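
# A sketch of pruning a training program down to an inference-only program
# (`accuracy` is an assumed Evaluator or target variable from model building):
#
#     inference_program = get_inference_program(accuracy)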


def prepend_feed_ops(inference_program,
                     feed_target_names,
                     feed_holder_name='feed'):
    if len(feed_target_names) == 0:
        return

    global_block = inference_program.global_block()
    feed_var = global_block.create_var(
        name=feed_holder_name,
        type=core.VarDesc.VarType.FEED_MINIBATCH,
        persistable=True)

    for i, name in enumerate(feed_target_names):
        out = global_block.var(name)
        global_block.prepend_op(
            type='feed',
            inputs={'X': [feed_var]},
            outputs={'Out': [out]},
            attrs={'col': i})


def append_fetch_ops(inference_program,
                     fetch_target_names,
                     fetch_holder_name='fetch'):
    global_block = inference_program.global_block()
    fetch_var = global_block.create_var(
        name=fetch_holder_name,
        type=core.VarDesc.VarType.FETCH_LIST,
        persistable=True)

    for i, name in enumerate(fetch_target_names):
        global_block.append_op(
            type='fetch',
            inputs={'X': [name]},
            outputs={'Out': [fetch_var]},
            attrs={'col': i})


def save_inference_model(dirname,
                         feeded_var_names,
                         target_vars,
                         executor,
                         main_program=None,
                         model_filename=None,
                         params_filename=None):
    """
    Build a model especially for inference,
    and save it to the directory by the executor.

    :param dirname: directory path
    :param feeded_var_names: names of variables that need to be fed data during inference
    :param target_vars: variables from which we can get inference results
    :param executor: executor that saves the inference model
    :param main_program: original program, which will be pruned to build the inference model.
            Default: default_main_program().
    :param model_filename: the name of the file to save the inference program.
        If not specified, the default filename `__model__` will be used.
    :param params_filename: the name of the file to save parameters.
        It is used for the case that all parameters are saved in a single binary file.
        If not specified, parameters are considered saved in separate files.

    :return: None
    """
    if isinstance(feeded_var_names, basestring):
        feeded_var_names = [feeded_var_names]
    else:
        if len(feeded_var_names) > 0:
            if not (bool(feeded_var_names) and all(
                    isinstance(name, basestring) for name in feeded_var_names)):
                raise ValueError("'feeded_var_names' should be a list of str.")

    if isinstance(target_vars, Variable):
        target_vars = [target_vars]
    else:
        if not (bool(target_vars) and all(
                isinstance(var, Variable) for var in target_vars)):
            raise ValueError("'target_vars' should be a list of Variable.")

    if main_program is None:
        main_program = default_main_program()
    copy_program = main_program.clone()

    if not os.path.isdir(dirname):
        os.makedirs(dirname)

    # Clear the is_target information and remove the existing feed and fetch ops.
    # Iterate in reverse so that removing an op does not shift the indices of
    # the ops that are still to be visited.
    global_block = copy_program.global_block()
    for i in range(len(global_block.ops) - 1, -1, -1):
        op = global_block.ops[i]
        op.desc.set_is_target(False)
        if op.type == "feed" or op.type == "fetch":
            global_block.remove_op(i)
    copy_program.desc.flush()

    pruned_program = copy_program.prune(targets=target_vars)
    inference_program = pruned_program.inference_optimize()
    fetch_var_names = [v.name for v in target_vars]

    prepend_feed_ops(inference_program, feeded_var_names)
    append_fetch_ops(inference_program, fetch_var_names)

    if model_filename is not None:
        model_filename = os.path.basename(model_filename)
    else:
        model_filename = "__model__"
    model_filename = os.path.join(dirname, model_filename)

    if params_filename is not None:
        params_filename = os.path.basename(params_filename)

    with open(model_filename, "wb") as f:
        f.write(inference_program.desc.serialize_to_string())

    save_persistables(executor, dirname, inference_program, params_filename)
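
# A minimal end-to-end sketch (assumes `image` and `predict` are variables of
# a network built on the default program, and `exe` is an Executor):
#
#     save_inference_model(
#         dirname="./infer_model",
#         feeded_var_names=["image"],    # feed by variable name
#         target_vars=[predict],         # fetch these at inference time
#         executor=exe)
#
# This writes ./infer_model/__model__ plus one file per parameter; pass
# model_filename/params_filename to combine them into single files instead.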


def load_inference_model(dirname,
                         executor,
                         model_filename=None,
                         params_filename=None):
    """
    Load the inference model from a directory.

    :param dirname: directory path
    :param executor: executor that loads the inference model
    :param model_filename: the name of the file to load the inference program.
        If not specified, the default filename `__model__` will be used.
    :param params_filename: the name of the file to load parameters.
        It is used for the case that all parameters are saved in a single binary file.
        If not specified, parameters are considered saved in separate files.

    :return: [program, feed_target_names, fetch_targets]
             program: program especially for inference.
             feed_target_names: names of variables that need to be fed data
             fetch_targets: variables from which we can get inference results.
    """
    if not os.path.isdir(dirname):
        raise ValueError("There is no directory named '%s'" % dirname)

    if model_filename is not None:
        model_filename = os.path.basename(model_filename)
    else:
        model_filename = "__model__"
    model_filename = os.path.join(dirname, model_filename)

    if params_filename is not None:
        params_filename = os.path.basename(params_filename)

    with open(model_filename, "rb") as f:
        program_desc_str = f.read()

    program = Program.parse_from_string(program_desc_str)
    load_persistables(executor, dirname, program, params_filename)

    feed_target_names = program.desc.get_feed_target_names()
    fetch_target_names = program.desc.get_fetch_target_names()
    fetch_targets = [
        program.global_block().var(name) for name in fetch_target_names
    ]

    return [program, feed_target_names, fetch_targets]
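
# A sketch of running inference with the model saved above (the numpy input
# shape is an assumption for illustration):
#
#     import numpy as np
#
#     [infer_prog, feed_names, fetch_targets] = load_inference_model(
#         dirname="./infer_model", executor=exe)
#     fake_image = np.random.random((1, 784)).astype("float32")
#     results = exe.run(infer_prog,
#                       feed={feed_names[0]: fake_image},
#                       fetch_list=fetch_targets)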


def get_parameter_value(para, executor):
    """
    Get the LoDTensor for the parameter.

    :param executor: executor for retrieving the value
    :param para: the given parameter

    :return: the LoDTensor for the parameter
    """
    assert is_parameter(para)

    get_program = Program()
    block = get_program.global_block()
    new_var = _clone_var_in_block_(block, para)
    return executor.run(get_program, feed={}, fetch_list=[new_var])[0]


def get_parameter_value_by_name(name, executor, program=None):
    """
    Get the LoDTensor for the parameter with the given name.

    :param executor: executor for retrieving the value
    :param name: the name of the parameter
    :param program: the program where the variable is found.
            Default: default_main_program().

    :return: the LoDTensor for the variable
    """
    if program is None:
        program = default_main_program()
    var = program.global_block().var(name)
    return get_parameter_value(var, executor)
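
# A sketch of reading back a single trained weight (the parameter name
# "fc_0.w_0" is hypothetical; real names depend on how the layers were built):
#
#     weight = get_parameter_value_by_name("fc_0.w_0", exe)
#     print(weight)  # the value fetched by executor.run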


SUCCESS_MARK_FILENAME = "_SUCCESS"
CHECKPOINT_PREFIX = "checkpoint"
CHECKPOINT_SEPARATOR = "_"


def save_checkpoint(executor,
                    checkpoint_dir=None,
                    max_num_checkpoints=3,
                    save_interval_secs=600,
                    main_program=None):
    """
    Save a checkpoint of the persistable LoDTensor variables from main_program
    into checkpoint_dir. Each checkpoint is written to a subdirectory named by
    a serial number counting up from 0. save_checkpoint uses an LRU strategy
    to keep at most max_num_checkpoints checkpoint directories, and the
    interval between two saved checkpoints must be greater than
    save_interval_secs.

    :param executor: executor that saves the variables
    :param checkpoint_dir: directory to save checkpoints into
    :param max_num_checkpoints: the maximum number of checkpoints to keep
    :param save_interval_secs: the minimum interval between two checkpoints
    :param main_program: program whose persistable variables are saved
    """
    if checkpoint_dir is None:
        checkpoint_dir = os.getcwd()

    if not os.path.isdir(checkpoint_dir):
        os.makedirs(checkpoint_dir)

    serial = _get_lastest_checkpoint_dir(checkpoint_dir)
    if serial >= 0 and not _interval_secs_exceed(
            _get_serial_dir(serial, checkpoint_dir), save_interval_secs):
        return

    serial += 1
    cur_dir = _get_serial_dir(serial, checkpoint_dir)

    save_vars(
        executor,
        dirname=cur_dir,
        main_program=main_program,
        vars=None,
        predicate=_is_checkpoint_var,
        filename=None)
    _write_success(cur_dir)
    _lru_delete(checkpoint_dir, max_num_checkpoints)
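
# A sketch of periodic checkpointing inside a training loop (the loop, `exe`,
# and `feeder` are assumed; the defaults keep at most 3 checkpoints saved at
# least 600 seconds apart):
#
#     for pass_id in range(num_passes):
#         for data in train_reader():
#             exe.run(fluid.default_main_program(), feed=feeder.feed(data))
#         save_checkpoint(exe, checkpoint_dir="./checkpoints")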


def load_checkpoint(executor, checkpoint_dir=None, main_program=None):
    """
    Load a checkpoint from a directory by executor. It finds the most recent
    saved checkpoint directory and loads it automatically.

    :param executor: executor that loads the variables
    :param checkpoint_dir: directory to load checkpoints from
    :param main_program: program whose persistable variables are loaded
    """

    if checkpoint_dir is None:
        checkpoint_dir = os.getcwd()

    serial = _get_lastest_checkpoint_dir(checkpoint_dir)

    if serial < 0:
        return

    cur_dir = _get_serial_dir(serial, checkpoint_dir)

    load_vars(
        executor,
        dirname=cur_dir,
        main_program=main_program,
        predicate=_is_checkpoint_var,
        filename=None)
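
# A sketch of resuming from the most recent successful checkpoint, and of
# cleaning up once training has finished normally:
#
#     load_checkpoint(exe, checkpoint_dir="./checkpoints")  # no-op if none exist
#     ...
#     clean_checkpoint("./checkpoints", delete_dir=True)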


def clean_checkpoint(checkpoint_dir, delete_dir=False):
    """
    Clean the checkpoint directory. When training exits normally, the trainer
    calls clean_checkpoint to delete the checkpoints saved before.
    delete_dir only works when the directory is empty; otherwise, OSError is
    raised.
    """
    if checkpoint_dir is None:
        checkpoint_dir = os.getcwd()
    _lru_delete(checkpoint_dir, max_num_checkpoints=0)

    if delete_dir and not os.listdir(checkpoint_dir):
        os.rmdir(checkpoint_dir)


def _get_serial_dir(serial, checkpoint_dir):
    serial_folder = CHECKPOINT_PREFIX + CHECKPOINT_SEPARATOR + str(serial)
    return os.path.join(checkpoint_dir, serial_folder)


def _is_checkpoint_var(var):
    """
    Checkpoints do not save or load all the variables. Variables whose type is
    FEED_MINIBATCH, FETCH_LIST, or RAW, and variables whose names end with
    "@GRAD", are discarded.

    :param var: the variable to check
    """
    if var.desc.type() == core.VarDesc.VarType.FEED_MINIBATCH or \
            var.desc.type() == core.VarDesc.VarType.FETCH_LIST or \
            var.desc.type() == core.VarDesc.VarType.RAW:
        return False

    if var.name.endswith("@GRAD"):
        return False

    return var.persistable


def _interval_secs_exceed(dirname, save_interval_secs):
    dir_time = os.path.getmtime(dirname)
    if save_interval_secs > (time.time() - dir_time):
        return False
    return True


def _lru_delete(dirname, max_num_checkpoints=3):
    # Checkpoint directories are named "checkpoint_<serial>" by
    # _get_serial_dir, so recover the serial number from each directory name
    # and skip anything that doesn't match that pattern.
    dirs = os.listdir(dirname)
    serials = []
    for cur_dir in dirs:
        try:
            _, serial = cur_dir.split(CHECKPOINT_SEPARATOR)
            serials.append(int(serial))
        except ValueError:
            continue

    if len(serials) <= max_num_checkpoints:
        return

    serials.sort(reverse=True)
    serials = serials[max_num_checkpoints:]
    for serial in serials:
        cur_dir = _get_serial_dir(serial, dirname)
        shutil.rmtree(cur_dir)


def _write_success(dirname):
    """
    Write an empty file named "_SUCCESS" in the checkpoint directory to
    indicate that this checkpoint is correct.

    :param dirname: checkpoint directory
    """
    success_file = os.path.join(dirname, SUCCESS_MARK_FILENAME)
    with open(success_file, 'a') as f:
        now = time.ctime()
        f.write(now)


def _get_lastest_checkpoint_dir(checkpoint_dir):
    """
    Get the latest checkpoint serial in the checkpoint directory; the _SUCCESS
    file must exist in the serial's subdirectory.

    :param checkpoint_dir: the directory containing checkpoint subdirectories
    """
    if not checkpoint_dir.strip():
        return -1

    def has_success(checkpoint_dir, cur_dir):
        """
        Is the _SUCCESS mark file present in this serial directory?
        """
        try:
            _, serial = cur_dir.split(CHECKPOINT_SEPARATOR)
            int(serial)
        except ValueError:
            return -1

        if not os.path.isdir(os.path.join(checkpoint_dir, cur_dir)):
            return -1

        success_path = os.path.join(
            _get_serial_dir(serial, checkpoint_dir), SUCCESS_MARK_FILENAME)
        if os.path.isfile(success_path):
            return int(serial)
        return -1

    if not os.path.isdir(checkpoint_dir):
        return -1

    current_dir = -1
    dirs = os.listdir(checkpoint_dir)
    for cur_dir in dirs:
        success_num = has_success(checkpoint_dir, cur_dir)
        if success_num > current_dir:
            current_dir = success_num
    return current_dir