diff --git a/python/paddle/static/amp/__init__.py b/python/paddle/static/amp/__init__.py
index 01832fd536769bff0b5613d399d35d61131cad5c..795e49698f34462b4182a69d49e4275db94c70d7 100644
--- a/python/paddle/static/amp/__init__.py
+++ b/python/paddle/static/amp/__init__.py
@@ -19,9 +19,3 @@ from .fp16_lists import CustomOpLists, AutoMixedPrecisionLists
 from . import fp16_utils
 from .fp16_utils import fp16_guard, cast_model_to_fp16, cast_parameters_to_fp16
 from . import bf16
-from .bf16 import bf16_guard
-
-__all__ = []
-__all__ += decorator.__all__
-__all__ += fp16_lists.__all__
-__all__ += fp16_utils.__all__
diff --git a/python/paddle/static/amp/amp_nn.py b/python/paddle/static/amp/amp_nn.py
index c5e812c141d3696459c6b040c30fe7e39f02629c..0f936ae8f57b9e9731173aac4efa44593993765d 100644
--- a/python/paddle/static/amp/amp_nn.py
+++ b/python/paddle/static/amp/amp_nn.py
@@ -18,8 +18,6 @@ from paddle.fluid.data_feeder import check_type, check_variable_and_dtype
 from paddle.fluid.framework import Variable, in_dygraph_mode
 from paddle.fluid.layer_helper import LayerHelper
 
-__all__ = ['check_finite_and_unscale', 'update_loss_scaling']
-
 
 def check_finite_and_unscale(x, scale, name=None, float_status=None):
     """
diff --git a/python/paddle/static/amp/bf16/__init__.py b/python/paddle/static/amp/bf16/__init__.py
index 82b616b299447c2a297d2dd5e718e8ef4a09b085..fad4a654fd88b8743720a0e469d29c95ad4a1462 100644
--- a/python/paddle/static/amp/bf16/__init__.py
+++ b/python/paddle/static/amp/bf16/__init__.py
@@ -24,8 +24,3 @@ from .amp_utils import (
 )
 from . import decorator
 from .decorator import decorate_bf16
-
-__all__ = []
-__all__ += decorator.__all__
-__all__ += amp_lists.__all__
-__all__ += amp_utils.__all__
diff --git a/python/paddle/static/amp/bf16/amp_lists.py b/python/paddle/static/amp/bf16/amp_lists.py
index d1878a3367fbc7a72a9cc90cea7028a6893ce37d..5ea5beb708b89414f6aa468f18be8dc28e073277 100644
--- a/python/paddle/static/amp/bf16/amp_lists.py
+++ b/python/paddle/static/amp/bf16/amp_lists.py
@@ -20,8 +20,6 @@ from ..fp16_lists import black_list as black_list_fp16
 from ..fp16_lists import gray_list as gray_list_fp16
 from ..fp16_lists import white_list as white_list_fp16
 
-__all__ = ["AutoMixedPrecisionListsBF16"]
-
 
 class AutoMixedPrecisionListsBF16:
     """
diff --git a/python/paddle/static/amp/bf16/amp_utils.py b/python/paddle/static/amp/bf16/amp_utils.py
index cf8c82127b3b45f29559444a65bf9847124ccaf3..f9a813aa44d41ccff8a035b9832fa5705fd6e17d 100644
--- a/python/paddle/static/amp/bf16/amp_utils.py
+++ b/python/paddle/static/amp/bf16/amp_utils.py
@@ -31,14 +31,6 @@ from ..fp16_utils import (
 )
 from .amp_lists import AutoMixedPrecisionListsBF16
 
-__all__ = [
-    "bf16_guard",
-    "rewrite_program_bf16",
-    "cast_model_to_bf16",
-    "cast_parameters_to_bf16",
-    "convert_float_to_uint16",
-]
-
 _logger = get_logger(
     __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s'
 )
diff --git a/python/paddle/static/amp/bf16/decorator.py b/python/paddle/static/amp/bf16/decorator.py
index 20286d3eebca5fead08b8e0e1f291478e0bb2080..66963e25634f09a1f73aed6df7945d726c0b9e40 100644
--- a/python/paddle/static/amp/bf16/decorator.py
+++ b/python/paddle/static/amp/bf16/decorator.py
@@ -25,8 +25,6 @@ from .amp_utils import (
     rewrite_program_bf16,
 )
 
-__all__ = ["decorate_bf16"]
-
 
 class OptimizerWithMixedPrecision:
     """
diff --git a/python/paddle/static/amp/decorator.py b/python/paddle/static/amp/decorator.py
index ba33f6b391b0b825ad87ae6ecbf1a14778cf4b2e..827a3a8b599f87eed1a4ed3b66f5157915f6fbe4 100644
--- a/python/paddle/static/amp/decorator.py
+++ b/python/paddle/static/amp/decorator.py
@@ -34,8 +34,6 @@ from .fp16_utils import (
     update_role_var_grad,
 )
 
-__all__ = ["decorate"]
-
 
 class OptimizerWithMixedPrecision:
     """
diff --git a/python/paddle/static/amp/fp16_lists.py b/python/paddle/static/amp/fp16_lists.py
index b2acd0bb5156ddab063a02304dd09d199ee38c80..b3f9b0331a86c19577a09e13b54db9f6aeb57749 100644
--- a/python/paddle/static/amp/fp16_lists.py
+++ b/python/paddle/static/amp/fp16_lists.py
@@ -16,8 +16,6 @@ import copy
 
 from paddle.fluid import core
 
-__all__ = ["CustomOpLists", "AutoMixedPrecisionLists"]
-
 # lookup_table fp16 is slower than fp32, though fp16 is supported.
 _extra_unsupported_fp16_list = {
     'lookup_table',
diff --git a/python/paddle/static/amp/fp16_utils.py b/python/paddle/static/amp/fp16_utils.py
index c9cee2ab8d25cf96909896453703acfb483087e7..281d3638ee261c9bf8dd53e1c7feee1c50968545 100644
--- a/python/paddle/static/amp/fp16_utils.py
+++ b/python/paddle/static/amp/fp16_utils.py
@@ -23,8 +23,6 @@ from paddle.fluid.wrapped_decorator import signature_safe_contextmanager
 
 from .fp16_lists import AutoMixedPrecisionLists
 
-__all__ = ["fp16_guard", "cast_model_to_fp16", "cast_parameters_to_fp16"]
-
 _logger = get_logger(
     __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s'
 )
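Note on the mechanism these hunks delete: `__all__` only controls which names a wildcard `from module import *` binds; explicitly named imports are unaffected, so e.g. `from paddle.static.amp.fp16_utils import fp16_guard` keeps working after this change. Below is a minimal sketch of CPython's rule, using a hypothetical namespace dict rather than the real Paddle modules; `star_import_names` is an illustrative helper, not a Paddle API.

```python
# Hypothetical module namespace standing in for e.g. fp16_utils;
# the entries are illustrative, not actual Paddle attributes.
namespace = {
    "fp16_guard": object(),
    "cast_model_to_fp16": object(),
    "_private_helper": object(),  # underscore-prefixed, never exported by *
    "__all__": ["fp16_guard"],    # what the deleted declarations expressed
}


def star_import_names(ns):
    """Names bound by `from module import *`: use __all__ when present,
    otherwise every key that does not start with an underscore."""
    if "__all__" in ns:
        return list(ns["__all__"])
    return [n for n in ns if not n.startswith("_")]


print(star_import_names(namespace))  # ['fp16_guard']
del namespace["__all__"]             # the effect of these deletions
print(star_import_names(namespace))  # ['fp16_guard', 'cast_model_to_fp16']
```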