diff --git a/paddle/fluid/imperative/reducer.cc b/paddle/fluid/imperative/reducer.cc
index 71d68fa2e0d6d9070601ded18891312a5d3b5fbf..3f0703f05a80a523cb2818c2104943f010b15191 100644
--- a/paddle/fluid/imperative/reducer.cc
+++ b/paddle/fluid/imperative/reducer.cc
@@ -225,7 +225,7 @@ void Reducer::MarkVariableReady(const VariableIndex &var_index,
 
 void Reducer::MarkGroupReady(size_t group_index) {
   if (group_index > next_group_) {
-    LOG(WARNING) << "Maybe it need adjust the order of group";
+    VLOG(3) << "Maybe it need adjust the order of group";
     return;
   }
 
diff --git a/python/paddle/distributed/fleet/base/fleet_base.py b/python/paddle/distributed/fleet/base/fleet_base.py
index 75e22807e4e88f6e8e94e649089bb3c2e447f2d6..9ea912c78c56a98d662d2e0e5058ee890a0cc2ed 100644
--- a/python/paddle/distributed/fleet/base/fleet_base.py
+++ b/python/paddle/distributed/fleet/base/fleet_base.py
@@ -615,10 +615,10 @@ class Fleet(object):
 
         if strategy is not None:
             warnings.warn(
-                "It is recommended to pass in DistributedStrategy"
-                "in fleet.init. The strategy here is for compatibility."
-                "If the `strategy` in fleet.distributed_optimizer() is"
-                "not None, then it will overwrite the DistributedStrategy in fleet.init(),"
+                "It is recommended to use DistributedStrategy "
+                "in fleet.init(). The strategy here is only for compatibility. "
+                "If the strategy in fleet.distributed_optimizer() is "
+                "not None, then it will overwrite the DistributedStrategy in fleet.init(), "
                 "which will take effect in distributed training.")
            self._user_defined_strategy = copy.deepcopy(strategy)
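
Note (not part of the patch): the fleet_base.py hunk matters because Python concatenates adjacent string literals at compile time, so the old literals, which had no trailing spaces, produced a warning message with words fused together. The minimal standalone sketch below illustrates the difference; the variable names are illustrative only and do not appear in the patch.

    # Adjacent string literals are joined as-is, so a missing trailing
    # space makes the words of the emitted warning run together.
    old_msg = (
        "It is recommended to pass in DistributedStrategy"
        "in fleet.init. The strategy here is for compatibility."
    )
    new_msg = (
        "It is recommended to use DistributedStrategy "
        "in fleet.init(). The strategy here is only for compatibility. "
    )
    print(old_msg)  # "...DistributedStrategyin fleet.init. ..."  <- fused words
    print(new_msg)  # "...DistributedStrategy in fleet.init(). ..." <- readable

The reducer.cc hunk is a logging-level change only: the message is moved from an always-on LOG(WARNING) to VLOG(3), so it is emitted only when verbose glog output at level 3 or higher is enabled.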