diff --git a/python/paddle/fluid/dygraph/amp/auto_cast.py b/python/paddle/fluid/dygraph/amp/auto_cast.py
index 25a732306388a0636b0d8de2253a1ba5e6415e52..0d02a383c1bb80eb6786bbe183690986c1566d56 100644
--- a/python/paddle/fluid/dygraph/amp/auto_cast.py
+++ b/python/paddle/fluid/dygraph/amp/auto_cast.py
@@ -23,7 +23,6 @@ import functools
 import paddle
 import operator
 import types
-import paddle.fluid as fluid
 
 __all__ = ['amp_guard', 'amp_decorate']
 
@@ -220,16 +219,16 @@ def amp_guard(enable=True,
         .. code-block:: python
 
             import numpy as np
-            import paddle.fluid as fluid
+            import paddle
 
             data = np.random.uniform(-1, 1, [10, 3, 32, 32]).astype('float32')
-            with fluid.dygraph.guard():
-                conv2d = fluid.dygraph.Conv2D(3, 2, 3)
-                data = fluid.dygraph.to_variable(data)
-                with fluid.dygraph.amp_guard():
+            with paddle.fluid.dygraph.guard():
+                conv2d = paddle.fluid.dygraph.Conv2D(3, 2, 3)
+                data = paddle.fluid.dygraph.to_variable(data)
+                with paddle.fluid.dygraph.amp_guard():
                     conv = conv2d(data)
                     print(conv.dtype) # FP16
-                with fluid.dygraph.amp_guard(enable=False):
+                with paddle.fluid.dygraph.amp_guard(enable=False):
                     conv = conv2d(data)
                     print(conv.dtype) # FP32
 
@@ -301,7 +300,7 @@ class StateDictHook(object):
     def __call__(self, state_dict):
         for key in state_dict:
             param = state_dict[key]
-            with fluid.dygraph.guard():
+            with paddle.fluid.dygraph.guard():
                 param_applied = paddle.cast(param, self._save_dtype)
                 param_applied.name = param.name
                 state_dict[key] = param_applied
@@ -335,16 +334,15 @@ def amp_decorate(models,
            # required: gpu
            # Demo1: single model and optimizer:
            import paddle
-           import paddle.fluid as fluid
 
            model = paddle.nn.Conv2D(3, 2, 3, bias_attr=False)
            optimzier = paddle.optimizer.SGD(parameters=model.parameters())
 
-           model, optimizer = fluid.dygraph.amp_decorate(models=model, optimizers=optimzier, level='O2')
+           model, optimizer = paddle.fluid.dygraph.amp_decorate(models=model, optimizers=optimzier, level='O2')
 
            data = paddle.rand([10, 3, 32, 32])
 
-           with fluid.dygraph.amp_guard(enable=True, custom_white_list=None, custom_black_list=None, level='O2'):
+           with paddle.fluid.dygraph.amp_guard(enable=True, custom_white_list=None, custom_black_list=None, level='O2'):
               output = model(data)
               print(output.dtype) # FP16
 
@@ -353,11 +351,11 @@ def amp_decorate(models,
            model2 = paddle.nn.Conv2D(3, 2, 3, bias_attr=False)
            optimizer2 = paddle.optimizer.Adam(parameters=model2.parameters())
 
-           models, optimizers = fluid.dygraph.amp_decorate(models=[model, model2], optimizers=[optimzier, optimizer2], level='O2')
+           models, optimizers = paddle.fluid.dygraph.amp_decorate(models=[model, model2], optimizers=[optimzier, optimizer2], level='O2')
 
            data = paddle.rand([10, 3, 32, 32])
 
-           with fluid.dygraph.amp_guard(enable=True, custom_white_list=None, custom_black_list=None, level='O2'):
+           with paddle.fluid.dygraph.amp_guard(enable=True, custom_white_list=None, custom_black_list=None, level='O2'):
               output = models[0](data)
               output2 = models[1](data)
               print(output.dtype) # FP16
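
Note: as a quick sanity check on the updated docstrings, below is a minimal sketch of the post-change usage. It assumes a GPU-enabled Paddle build from the same era as this patch, where amp_guard and amp_decorate are still exposed under paddle.fluid.dygraph; the layer shapes and variable names are illustrative, taken from the examples in the diff above.

    import paddle

    # The module no longer aliases `import paddle.fluid as fluid`;
    # callers go through the fully qualified paddle.fluid.dygraph path.
    model = paddle.nn.Conv2D(3, 2, 3, bias_attr=False)
    optimizer = paddle.optimizer.SGD(parameters=model.parameters())
    model, optimizer = paddle.fluid.dygraph.amp_decorate(
        models=model, optimizers=optimizer, level='O2')

    data = paddle.rand([10, 3, 32, 32])
    with paddle.fluid.dygraph.amp_guard(enable=True, custom_white_list=None,
                                        custom_black_list=None, level='O2'):
        output = model(data)
        print(output.dtype)  # paddle.float16 on AMP-capable hardware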