From c3b7bc61fe74147ea7b07eaa88ac0f2d801bedb4 Mon Sep 17 00:00:00 2001 From: Liyulingyue <83450930+Liyulingyue@users.noreply.github.com> Date: Mon, 9 May 2022 17:16:24 +0800 Subject: [PATCH] fix docs of auto_cast, cuda_places, static.save (#42107) * auto_cast; test=document_fix * static.save; test=document_fix * cuda_places; test=document_fix --- python/paddle/amp/auto_cast.py | 10 +++++----- python/paddle/fluid/io.py | 3 +-- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/python/paddle/amp/auto_cast.py b/python/paddle/amp/auto_cast.py index 5132f23079f..96a94d89846 100644 --- a/python/paddle/amp/auto_cast.py +++ b/python/paddle/amp/auto_cast.py @@ -54,25 +54,25 @@ def auto_cast(enable=True, with paddle.amp.auto_cast(): conv = conv2d(data) - print(conv.dtype) # FP16 + print(conv.dtype) # paddle.float32 with paddle.amp.auto_cast(enable=False): conv = conv2d(data) - print(conv.dtype) # FP32 + print(conv.dtype) # paddle.float32 with paddle.amp.auto_cast(custom_black_list={'conv2d'}): conv = conv2d(data) - print(conv.dtype) # FP32 + print(conv.dtype) # paddle.float32 a = paddle.rand([2,3]) b = paddle.rand([2,3]) with paddle.amp.auto_cast(custom_white_list={'elementwise_add'}): c = a + b - print(c.dtype) # FP16 + print(c.dtype) # paddle.float32 with paddle.amp.auto_cast(custom_white_list={'elementwise_add'}, level='O2'): d = a + b - print(d.dtype) # FP16 + print(d.dtype) # paddle.float32 """ return amp_guard(enable, custom_white_list, custom_black_list, level, dtype) diff --git a/python/paddle/fluid/io.py b/python/paddle/fluid/io.py index 7c7f101286e..8b25c93d7ce 100644 --- a/python/paddle/fluid/io.py +++ b/python/paddle/fluid/io.py @@ -1846,8 +1846,7 @@ def _legacy_save(param_dict, model_path, protocol=2): @static_only def save(program, model_path, protocol=4, **configs): """ - :api_attr: Static Graph - + This function save parameters, optimizer information and network description to model_path. 
The parameters contain all the trainable Tensors, and will be saved to a file with the suffix ".pdparams". -- GitLab