From 34234282a65eab864460a476c9380f1d22fa4126 Mon Sep 17 00:00:00 2001
From: hong <43953930+phlrain@users.noreply.github.com>
Date: Fri, 12 Aug 2022 14:41:49 +0800
Subject: [PATCH] change default log level (#45093)

---
 .gitignore                                                | 5 +++++
 .../fleet/meta_parallel/sharding/group_sharded_stage2.py  | 2 +-
 python/paddle/distributed/sharding/group_sharded.py       | 2 +-
 3 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/.gitignore b/.gitignore
index 74cf6b8ab02..9925734e437 100644
--- a/.gitignore
+++ b/.gitignore
@@ -71,3 +71,8 @@ paddle/fluid/pybind/eager_final_state_op_function.cc
 paddle/fluid/operators/generated_op.cc
 paddle/phi/ops/compat/generated_sig.cc
 paddle/phi/api/yaml/parsed_apis/
+python/paddle/utils/code_gen/
+paddle/fluid/pybind/tmp_eager_final_state_op_function_impl.h
+paddle/fluid/pybind/eager_final_state_op_function_impl.h
+paddle/fluid/pybind/eager_op_function_impl.h
+paddle/fluid/pybind/op_function_impl.h
diff --git a/python/paddle/distributed/fleet/meta_parallel/sharding/group_sharded_stage2.py b/python/paddle/distributed/fleet/meta_parallel/sharding/group_sharded_stage2.py
index f13739960b3..905af0487ba 100644
--- a/python/paddle/distributed/fleet/meta_parallel/sharding/group_sharded_stage2.py
+++ b/python/paddle/distributed/fleet/meta_parallel/sharding/group_sharded_stage2.py
@@ -39,7 +39,7 @@ from .group_sharded_storage import GradStorage
 from .group_sharded_optimizer_stage2 import GroupShardedOptimizerStage2
 from .group_sharded_utils import Taskflow, Type, device_guard
 
-logger_ = get_logger(logging.INFO)
+logger_ = get_logger(logging.WARNING)
 
 
 def _trainable(param):
diff --git a/python/paddle/distributed/sharding/group_sharded.py b/python/paddle/distributed/sharding/group_sharded.py
index 58fb51b62b9..9ebe7fd6031 100644
--- a/python/paddle/distributed/sharding/group_sharded.py
+++ b/python/paddle/distributed/sharding/group_sharded.py
@@ -34,7 +34,7 @@ from paddle.distributed.fleet.meta_parallel.sharding.group_sharded_stage2 import
 from paddle.distributed.fleet.meta_parallel.sharding.group_sharded_stage3 import GroupShardedStage3
 from paddle.distributed.fleet.meta_parallel.sharding.group_sharded_utils import GroupShardedScaler
 
-logger_ = get_logger(logging.INFO)
+logger_ = get_logger(logging.WARNING)
 
 
 def group_sharded_parallel(model,
--
GitLab