diff --git a/python/paddle/distributed/ps/the_one_ps.py b/python/paddle/distributed/ps/the_one_ps.py
index 123fa7730748d6ac518f44a2bb9baa9c4a7d12d9..8abf32d61102b40ca3de2479cf94c98ad80cc768 100755
--- a/python/paddle/distributed/ps/the_one_ps.py
+++ b/python/paddle/distributed/ps/the_one_ps.py
@@ -15,7 +15,6 @@
 import warnings
 
 import os
-import paddle.distributed.fleet.proto.the_one_ps_pb2 as ps_pb2
 import paddle.fluid as fluid
 import paddle.distributed.fleet as fleet
 from paddle.fluid import core
@@ -27,6 +26,7 @@ from paddle.fluid.parallel_executor import ParallelExecutor
 from paddle.fluid.framework import Variable, Parameter
 from paddle.distributed.fleet.runtime.runtime_base import RuntimeBase
 from paddle.distributed.fleet.base.private_helper_function import wait_server_ready
+import paddle.distributed.fleet.proto.the_one_ps_pb2 as ps_pb2
 from paddle.fluid.communicator import Communicator, HeterClient
 from google.protobuf import text_format
 
diff --git a/python/paddle/distributed/ps/utils/public.py b/python/paddle/distributed/ps/utils/public.py
index 1f2c6fa4e67045269a98a6ada0244ba3a842ff58..b76484a3ebc111fb60c9326ba838ede2246ed219 100755
--- a/python/paddle/distributed/ps/utils/public.py
+++ b/python/paddle/distributed/ps/utils/public.py
@@ -59,7 +59,8 @@ DATA_NORM_GRAD_NAME = [x + "@GRAD" for x in DATA_NORM_NAME]
 def logger_config(log_path, logging_name):
     logger = logging.getLogger(logging_name)
     logger.setLevel(level=logging.DEBUG)
-    handler = logging.FileHandler(log_path, mode='a', encoding='UTF-8')
+    handler = logging.FileHandler(
+        log_path, mode='a', encoding='UTF-8', delay=True)
     handler.setLevel(logging.INFO)
     formatter = logging.Formatter(
         '%(levelname)s - %(asctime)s - %(pathname)s: %(lineno)s - %(message)s')
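
A minimal sketch (not part of the patch) of what `delay=True` changes in the standard-library `logging.FileHandler`: the log file is not opened or created until the first record is actually emitted, so merely constructing the handler no longer touches the filesystem. The paths and logger names below are hypothetical, used only for illustration.

```python
import logging
import os
import tempfile

# Hypothetical scratch paths, not taken from the patch.
log_dir = tempfile.mkdtemp()
eager_path = os.path.join(log_dir, "eager.log")
lazy_path = os.path.join(log_dir, "lazy.log")

# Without delay: the file is opened (and created) as soon as the handler is built.
eager = logging.FileHandler(eager_path, mode='a', encoding='UTF-8')
print(os.path.exists(eager_path))  # True, even though nothing was logged yet

# With delay=True, as in the patched logger_config: opening is deferred
# until the first record is emitted.
lazy = logging.FileHandler(lazy_path, mode='a', encoding='UTF-8', delay=True)
print(os.path.exists(lazy_path))   # False, no file created yet

logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)
lazy.setLevel(logging.INFO)
logger.addHandler(lazy)
logger.info("first record")        # the file is created here
print(os.path.exists(lazy_path))   # True
```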