diff --git a/ppcls/engine/engine.py b/ppcls/engine/engine.py
index aeb34ca57740667b0e30a48605482424a9d30a93..7ab29d8d270d2db14d77fef43d6a273e395c9bea 100644
--- a/ppcls/engine/engine.py
+++ b/ppcls/engine/engine.py
@@ -71,7 +71,7 @@ class Engine(object):
         self.output_dir = self.config['Global']['output_dir']
         log_file = os.path.join(self.output_dir, self.config["Arch"]["name"],
                                 f"{mode}.log")
-        init_logger(name='root', log_file=log_file)
+        init_logger(log_file=log_file)
         print_config(config)
 
         # init train_func and eval_func
@@ -92,7 +92,8 @@ class Engine(object):
             self.vdl_writer = LogWriter(logdir=vdl_writer_path)
 
         # set device
-        assert self.config["Global"]["device"] in ["cpu", "gpu", "xpu", "npu", "mlu"]
+        assert self.config["Global"][
+            "device"] in ["cpu", "gpu", "xpu", "npu", "mlu"]
         self.device = paddle.set_device(self.config["Global"]["device"])
         logger.info('train with paddle {} and device {}'.format(
             paddle.__version__, self.device))
@@ -107,9 +108,7 @@ class Engine(object):
         self.scale_loss = 1.0
         self.use_dynamic_loss_scaling = False
         if self.amp:
-            AMP_RELATED_FLAGS_SETTING = {
-                'FLAGS_max_inplace_grad_add': 8,
-            }
+            AMP_RELATED_FLAGS_SETTING = {'FLAGS_max_inplace_grad_add': 8, }
             if paddle.is_compiled_with_cuda():
                 AMP_RELATED_FLAGS_SETTING.update({
                     'FLAGS_cudnn_batchnorm_spatial_persistent': 1
diff --git a/ppcls/static/train.py b/ppcls/static/train.py
index 1961dfaf32e25740bdd84dd24a3b10b16f4b1d2c..dd16cdb4caa41671d8f9979fea32e49611cf6ab0 100644
--- a/ppcls/static/train.py
+++ b/ppcls/static/train.py
@@ -71,7 +71,7 @@ def main(args):
 
     log_file = os.path.join(global_config['output_dir'],
                             config["Arch"]["name"], f"{mode}.log")
-    init_logger(name='root', log_file=log_file)
+    init_logger(log_file=log_file)
     print_config(config)
 
     if global_config.get("is_distributed", True):
diff --git a/ppcls/utils/logger.py b/ppcls/utils/logger.py
index d4faaa9b0cd77b6038b40277cc62b4c6981f2da4..bc8de364091e9b56dafdcffa4475f7f225306e1b 100644
--- a/ppcls/utils/logger.py
+++ b/ppcls/utils/logger.py
@@ -22,7 +22,7 @@ import paddle.distributed as dist
 _logger = None
 
 
-def init_logger(name='root', log_file=None, log_level=logging.INFO):
+def init_logger(name='ppcls', log_file=None, log_level=logging.INFO):
     """Initialize and get a logger by name.
     If the logger has not been initialized, this method will initialize the
     logger by adding one or two handlers, otherwise the initialized logger will
@@ -59,6 +59,7 @@ def init_logger(name='root', log_file=None, log_level=logging.INFO):
         _logger.setLevel(log_level)
     else:
         _logger.setLevel(logging.ERROR)
+    _logger.propagate = False
 
 
 def log_at_trainer0(log):
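
Note (not part of the patch): a minimal, self-contained sketch of the logging behaviour that switching the default name from 'root' to 'ppcls' and setting propagate = False appears to target. With a named logger whose propagation is disabled, records no longer bubble up to the root logger, so a handler that another library or the application attaches to root does not print every message a second time. The message text and setup below are illustrative only.

    import logging

    # Another library (or the application) configures the root logger
    # with its own handler and format.
    logging.basicConfig(level=logging.INFO, format="ROOT: %(message)s")

    # Named logger, set up roughly the way init_logger now does it.
    ppcls_logger = logging.getLogger("ppcls")
    ppcls_logger.setLevel(logging.INFO)
    ppcls_logger.addHandler(logging.StreamHandler())  # prints the bare message to stderr

    # Without this line the record would also propagate to root and be
    # printed again with the "ROOT:" prefix; with it, each message appears once.
    ppcls_logger.propagate = False

    ppcls_logger.info("train with paddle 2.x and device gpu")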