From 89b98703bf4c73342094c53cf770c2c8350027cc Mon Sep 17 00:00:00 2001 From: wangguanzhong Date: Sat, 12 Oct 2019 16:30:56 +0800 Subject: [PATCH] unify reader to dataloader (#3487) * unify reader to dataloader * add check_version --- ppdet/modeling/model_input.py | 16 +++++++--------- ppdet/utils/check.py | 18 ++++++++++++++++-- ppdet/utils/eval_utils.py | 6 +++--- tools/eval.py | 10 ++++++---- tools/infer.py | 12 +++++++----- tools/train.py | 18 ++++++++++-------- 6 files changed, 49 insertions(+), 31 deletions(-) diff --git a/ppdet/modeling/model_input.py b/ppdet/modeling/model_input.py index 1f16efe3f..ae6393e5d 100644 --- a/ppdet/modeling/model_input.py +++ b/ppdet/modeling/model_input.py @@ -38,7 +38,7 @@ feed_var_def = [ # yapf: enable -def create_feed(feed, use_pyreader=True): +def create_feed(feed, iterable=False): image_shape = feed.image_shape feed_var_map = {var['name']: var for var in feed_var_def} feed_var_map['image'] = { @@ -66,11 +66,9 @@ def create_feed(feed, use_pyreader=True): dtype=feed_var_map[key]['dtype'], lod_level=feed_var_map[key]['lod_level'])) for key in feed.fields]) - pyreader = None - if use_pyreader: - pyreader = fluid.io.PyReader( - feed_list=list(feed_vars.values()), - capacity=64, - use_double_buffer=True, - iterable=False) - return pyreader, feed_vars + loader = fluid.io.DataLoader.from_generator( + feed_list=list(feed_vars.values()), + capacity=64, + use_double_buffer=True, + iterable=iterable) + return loader, feed_vars diff --git a/ppdet/utils/check.py b/ppdet/utils/check.py index 9e816eaad..305fa3705 100644 --- a/ppdet/utils/check.py +++ b/ppdet/utils/check.py @@ -15,7 +15,6 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function -from __future__ import unicode_literals import sys @@ -24,7 +23,7 @@ import paddle.fluid as fluid import logging logger = logging.getLogger(__name__) -__all__ = ['check_gpu'] +__all__ = ['check_gpu', 'check_version'] def check_gpu(use_gpu): @@ -45,3 +44,18 @@ def check_gpu(use_gpu): except Exception as e: pass + +def check_version(): + """ + Log error and exit when the installed version of paddlepaddle is + not satisfied. + """ + err = "PaddlePaddle version 1.6 or higher is required, " \ + "or a suitable develop version is satisfied as well. \n" \ + "Please make sure the version is good with your code." \ + + try: + fluid.require_version('1.6.0') + except Exception as e: + logger.error(err) + sys.exit(1) diff --git a/ppdet/utils/eval_utils.py b/ppdet/utils/eval_utils.py index 2af875e45..c8913dcb4 100644 --- a/ppdet/utils/eval_utils.py +++ b/ppdet/utils/eval_utils.py @@ -57,7 +57,7 @@ def parse_fetches(fetches, prog=None, extra_keys=None): return keys, values, cls -def eval_run(exe, compile_program, pyreader, keys, values, cls): +def eval_run(exe, compile_program, loader, keys, values, cls): """ Run evaluation program, return program outputs. 
""" @@ -75,7 +75,7 @@ def eval_run(exe, compile_program, pyreader, keys, values, cls): has_bbox = 'bbox' in keys try: - pyreader.start() + loader.start() while True: outs = exe.run(compile_program, fetch_list=values, @@ -90,7 +90,7 @@ def eval_run(exe, compile_program, pyreader, keys, values, cls): iter_id += 1 images_num += len(res['bbox'][1][0]) if has_bbox else 1 except (StopIteration, fluid.core.EOFException): - pyreader.reset() + loader.reset() logger.info('Test finish iter {}'.format(iter_id)) end_time = time.time() diff --git a/tools/eval.py b/tools/eval.py index 4c941863d..d2ce2c44b 100644 --- a/tools/eval.py +++ b/tools/eval.py @@ -35,7 +35,7 @@ import paddle.fluid as fluid from ppdet.utils.eval_utils import parse_fetches, eval_run, eval_results, json_eval_results import ppdet.utils.checkpoint as checkpoint -from ppdet.utils.check import check_gpu +from ppdet.utils.check import check_gpu, check_version from ppdet.modeling.model_input import create_feed from ppdet.data.data_feed import create_reader from ppdet.core.workspace import load_config, merge_config, create @@ -62,6 +62,8 @@ def main(): # check if set use_gpu=True in paddlepaddle cpu version check_gpu(cfg.use_gpu) + # check if paddlepaddle version is satisfied + check_version() print_total_cfg(cfg) if 'eval_feed' not in cfg: @@ -79,12 +81,12 @@ def main(): eval_prog = fluid.Program() with fluid.program_guard(eval_prog, startup_prog): with fluid.unique_name.guard(): - pyreader, feed_vars = create_feed(eval_feed) + loader, feed_vars = create_feed(eval_feed) fetches = model.eval(feed_vars) eval_prog = eval_prog.clone(True) reader = create_reader(eval_feed, args_path=FLAGS.dataset_dir) - pyreader.decorate_sample_list_generator(reader, place) + loader.set_sample_list_generator(reader, place) # eval already exists json file if FLAGS.json_eval: @@ -120,7 +122,7 @@ def main(): callable(model.is_bbox_normalized): is_bbox_normalized = model.is_bbox_normalized() - results = eval_run(exe, compile_program, pyreader, keys, values, cls) + results = eval_run(exe, compile_program, loader, keys, values, cls) # evaluation resolution = None diff --git a/tools/infer.py b/tools/infer.py index 64049e3fc..608587000 100644 --- a/tools/infer.py +++ b/tools/infer.py @@ -44,7 +44,7 @@ from ppdet.data.data_feed import create_reader from ppdet.utils.eval_utils import parse_fetches from ppdet.utils.cli import ArgsParser -from ppdet.utils.check import check_gpu +from ppdet.utils.check import check_gpu, check_version from ppdet.utils.visualizer import visualize_results import ppdet.utils.checkpoint as checkpoint @@ -150,6 +150,8 @@ def main(): # check if set use_gpu=True in paddlepaddle cpu version check_gpu(cfg.use_gpu) + # check if paddlepaddle version is satisfied + check_version() print_total_cfg(cfg) if 'test_feed' not in cfg: @@ -169,12 +171,12 @@ def main(): infer_prog = fluid.Program() with fluid.program_guard(infer_prog, startup_prog): with fluid.unique_name.guard(): - _, feed_vars = create_feed(test_feed, use_pyreader=False) + loader, feed_vars = create_feed(test_feed, iterable=True) test_fetches = model.test(feed_vars) infer_prog = infer_prog.clone(True) reader = create_reader(test_feed) - feeder = fluid.DataFeeder(place=place, feed_list=feed_vars.values()) + loader.set_sample_list_generator(reader, place) exe.run(startup_prog) if cfg.weights: @@ -219,9 +221,9 @@ def main(): tb_image_frame = 0 # each frame can display ten pictures at most. 
imid2path = reader.imid2path - for iter_id, data in enumerate(reader()): + for iter_id, data in enumerate(loader()): outs = exe.run(infer_prog, - feed=feeder.feed(data), + feed=data, fetch_list=values, return_numpy=False) res = { diff --git a/tools/train.py b/tools/train.py index b9099210e..e1d130aa2 100644 --- a/tools/train.py +++ b/tools/train.py @@ -46,7 +46,7 @@ from ppdet.utils import dist_utils from ppdet.utils.eval_utils import parse_fetches, eval_run, eval_results from ppdet.utils.stats import TrainingStats from ppdet.utils.cli import ArgsParser -from ppdet.utils.check import check_gpu +from ppdet.utils.check import check_gpu, check_version import ppdet.utils.checkpoint as checkpoint from ppdet.modeling.model_input import create_feed @@ -81,6 +81,8 @@ def main(): # check if set use_gpu=True in paddlepaddle cpu version check_gpu(cfg.use_gpu) + # check if paddlepaddle version is satisfied + check_version() if not FLAGS.dist or trainer_id == 0: print_total_cfg(cfg) @@ -116,7 +118,7 @@ def main(): with fluid.program_guard(train_prog, startup_prog): with fluid.unique_name.guard(): model = create(main_arch) - train_pyreader, feed_vars = create_feed(train_feed) + train_loader, feed_vars = create_feed(train_feed) with mixed_precision_context(FLAGS.loss_scale, FLAGS.fp16) as ctx: train_fetches = model.train(feed_vars) @@ -139,12 +141,12 @@ def main(): with fluid.program_guard(eval_prog, startup_prog): with fluid.unique_name.guard(): model = create(main_arch) - eval_pyreader, feed_vars = create_feed(eval_feed) + eval_loader, feed_vars = create_feed(eval_feed) fetches = model.eval(feed_vars) eval_prog = eval_prog.clone(True) eval_reader = create_reader(eval_feed, args_path=FLAGS.dataset_dir) - eval_pyreader.decorate_sample_list_generator(eval_reader, place) + eval_loader.set_sample_list_generator(eval_reader, place) # parse eval fetches extra_keys = [] @@ -197,7 +199,7 @@ def main(): train_reader = create_reader(train_feed, (cfg.max_iters - start_iter) * devices_num, FLAGS.dataset_dir) - train_pyreader.decorate_sample_list_generator(train_reader, place) + train_loader.set_sample_list_generator(train_reader, place) # whether output bbox is normalized in model output layer is_bbox_normalized = False @@ -209,7 +211,7 @@ def main(): map_type = cfg.map_type if 'map_type' in cfg else '11point' train_stats = TrainingStats(cfg.log_smooth_window, train_keys) - train_pyreader.start() + train_loader.start() start_time = time.time() end_time = time.time() @@ -256,7 +258,7 @@ def main(): if FLAGS.eval: # evaluation - results = eval_run(exe, compiled_eval_prog, eval_pyreader, + results = eval_run(exe, compiled_eval_prog, eval_loader, eval_keys, eval_values, eval_cls) resolution = None if 'mask' in results[0]: @@ -278,7 +280,7 @@ def main(): logger.info("Best test box ap: {}, in iter: {}".format( best_box_ap_list[0], best_box_ap_list[1])) - train_pyreader.reset() + train_loader.reset() if __name__ == '__main__': -- GitLab
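
The sketch below distills the migration pattern this commit applies across ppdet: fluid.io.PyReader and fluid.DataFeeder are replaced by fluid.io.DataLoader.from_generator, and decorate_sample_list_generator becomes set_sample_list_generator. It is a minimal, self-contained example written against the 1.6-era fluid API the patch targets (hence the new check_version call); it is not code taken from ppdet, and the feed variables, shapes, and toy batch reader are assumptions made purely for illustration.

import numpy as np
import paddle.fluid as fluid

place = fluid.CPUPlace()

# Feed variables; names and shapes here are illustrative, not ppdet's.
image = fluid.layers.data(name='image', shape=[3, 32, 32], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')

# New style (this patch): build a DataLoader directly from the feed vars.
# iterable=False keeps the PyReader-like start()/reset() control flow used
# by the train/eval paths; tools/infer.py passes iterable=True instead.
loader = fluid.io.DataLoader.from_generator(
    feed_list=[image, label],
    capacity=64,
    use_double_buffer=True,
    iterable=False)

def batch_reader():
    # Yields batches as lists of (image, label) samples, which is what
    # set_sample_list_generator expects. Random data stands in for the
    # batches produced by ppdet's create_reader.
    for _ in range(4):
        batch = []
        for _ in range(2):
            img = np.random.random([3, 32, 32]).astype('float32')
            lbl = np.random.randint(0, 10, [1]).astype('int64')
            batch.append((img, lbl))
        yield batch

# Replaces pyreader.decorate_sample_list_generator(reader, place).
loader.set_sample_list_generator(batch_reader, places=place)

pred = fluid.layers.fc(input=image, size=10, act='softmax')
loss = fluid.layers.mean(fluid.layers.cross_entropy(input=pred, label=label))

exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())

# Non-iterable loaders keep the same start()/reset() loop as PyReader did;
# exe.run raises EOFException once the generator is exhausted.
loader.start()
try:
    while True:
        exe.run(fluid.default_main_program(), fetch_list=[loss])
except fluid.core.EOFException:
    loader.reset()

For the iterable=True path used in tools/infer.py, the start()/reset() loop is dropped and the loader is consumed directly, e.g. for data in loader(): exe.run(infer_prog, feed=data, fetch_list=values), which is what replaces the old fluid.DataFeeder(...).feed(data) call in that file.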