From fcc90531944e33894a77f8ede10758b3b28c8636 Mon Sep 17 00:00:00 2001 From: zhangkaihuo Date: Tue, 17 Jan 2023 11:37:44 +0800 Subject: [PATCH] Fix the paddle/static/amp/__init__.py (#49791) --- python/paddle/static/amp/__init__.py | 6 ------ python/paddle/static/amp/amp_nn.py | 2 -- python/paddle/static/amp/bf16/__init__.py | 5 ----- python/paddle/static/amp/bf16/amp_lists.py | 2 -- python/paddle/static/amp/bf16/amp_utils.py | 8 -------- python/paddle/static/amp/bf16/decorator.py | 2 -- python/paddle/static/amp/decorator.py | 2 -- python/paddle/static/amp/fp16_lists.py | 2 -- python/paddle/static/amp/fp16_utils.py | 2 -- 9 files changed, 31 deletions(-) diff --git a/python/paddle/static/amp/__init__.py b/python/paddle/static/amp/__init__.py index 01832fd536..795e49698f 100644 --- a/python/paddle/static/amp/__init__.py +++ b/python/paddle/static/amp/__init__.py @@ -19,9 +19,3 @@ from .fp16_lists import CustomOpLists, AutoMixedPrecisionLists from . import fp16_utils from .fp16_utils import fp16_guard, cast_model_to_fp16, cast_parameters_to_fp16 from . 
import bf16 -from .bf16 import bf16_guard - -__all__ = [] -__all__ += decorator.__all__ -__all__ += fp16_lists.__all__ -__all__ += fp16_utils.__all__ diff --git a/python/paddle/static/amp/amp_nn.py b/python/paddle/static/amp/amp_nn.py index c5e812c141..0f936ae8f5 100644 --- a/python/paddle/static/amp/amp_nn.py +++ b/python/paddle/static/amp/amp_nn.py @@ -18,8 +18,6 @@ from paddle.fluid.data_feeder import check_type, check_variable_and_dtype from paddle.fluid.framework import Variable, in_dygraph_mode from paddle.fluid.layer_helper import LayerHelper -__all__ = ['check_finite_and_unscale', 'update_loss_scaling'] - def check_finite_and_unscale(x, scale, name=None, float_status=None): """ diff --git a/python/paddle/static/amp/bf16/__init__.py b/python/paddle/static/amp/bf16/__init__.py index 82b616b299..fad4a654fd 100644 --- a/python/paddle/static/amp/bf16/__init__.py +++ b/python/paddle/static/amp/bf16/__init__.py @@ -24,8 +24,3 @@ from .amp_utils import ( ) from . import decorator from .decorator import decorate_bf16 - -__all__ = [] -__all__ += decorator.__all__ -__all__ += amp_lists.__all__ -__all__ += amp_utils.__all__ diff --git a/python/paddle/static/amp/bf16/amp_lists.py b/python/paddle/static/amp/bf16/amp_lists.py index d1878a3367..5ea5beb708 100644 --- a/python/paddle/static/amp/bf16/amp_lists.py +++ b/python/paddle/static/amp/bf16/amp_lists.py @@ -20,8 +20,6 @@ from ..fp16_lists import black_list as black_list_fp16 from ..fp16_lists import gray_list as gray_list_fp16 from ..fp16_lists import white_list as white_list_fp16 -__all__ = ["AutoMixedPrecisionListsBF16"] - class AutoMixedPrecisionListsBF16: """ diff --git a/python/paddle/static/amp/bf16/amp_utils.py b/python/paddle/static/amp/bf16/amp_utils.py index cf8c82127b..f9a813aa44 100644 --- a/python/paddle/static/amp/bf16/amp_utils.py +++ b/python/paddle/static/amp/bf16/amp_utils.py @@ -31,14 +31,6 @@ from ..fp16_utils import ( ) from .amp_lists import AutoMixedPrecisionListsBF16 -__all__ = [ - 
"bf16_guard", - "rewrite_program_bf16", - "cast_model_to_bf16", - "cast_parameters_to_bf16", - "convert_float_to_uint16", -] - _logger = get_logger( __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s' ) diff --git a/python/paddle/static/amp/bf16/decorator.py b/python/paddle/static/amp/bf16/decorator.py index 20286d3eeb..66963e2563 100644 --- a/python/paddle/static/amp/bf16/decorator.py +++ b/python/paddle/static/amp/bf16/decorator.py @@ -25,8 +25,6 @@ from .amp_utils import ( rewrite_program_bf16, ) -__all__ = ["decorate_bf16"] - class OptimizerWithMixedPrecision: """ diff --git a/python/paddle/static/amp/decorator.py b/python/paddle/static/amp/decorator.py index ba33f6b391..827a3a8b59 100644 --- a/python/paddle/static/amp/decorator.py +++ b/python/paddle/static/amp/decorator.py @@ -34,8 +34,6 @@ from .fp16_utils import ( update_role_var_grad, ) -__all__ = ["decorate"] - class OptimizerWithMixedPrecision: """ diff --git a/python/paddle/static/amp/fp16_lists.py b/python/paddle/static/amp/fp16_lists.py index b2acd0bb51..b3f9b0331a 100644 --- a/python/paddle/static/amp/fp16_lists.py +++ b/python/paddle/static/amp/fp16_lists.py @@ -16,8 +16,6 @@ import copy from paddle.fluid import core -__all__ = ["CustomOpLists", "AutoMixedPrecisionLists"] - # lookup_table fp16 is slower than fp32, though fp16 is supported. _extra_unsupported_fp16_list = { 'lookup_table', diff --git a/python/paddle/static/amp/fp16_utils.py b/python/paddle/static/amp/fp16_utils.py index c9cee2ab8d..281d3638ee 100644 --- a/python/paddle/static/amp/fp16_utils.py +++ b/python/paddle/static/amp/fp16_utils.py @@ -23,8 +23,6 @@ from paddle.fluid.wrapped_decorator import signature_safe_contextmanager from .fp16_lists import AutoMixedPrecisionLists -__all__ = ["fp16_guard", "cast_model_to_fp16", "cast_parameters_to_fp16"] - _logger = get_logger( __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s' ) -- GitLab