BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle, in sync with upstream)
Unverified commit fcc90531
Authored on Jan 17, 2023 by zhangkaihuo; committed via GitHub on Jan 17, 2023.
Fix the paddle/staitc/amp/__init__.py (#49791)
Parent: 86fa1715
Showing 9 changed files with 0 additions and 31 deletions (+0 −31)
python/paddle/static/amp/__init__.py        +0 −6
python/paddle/static/amp/amp_nn.py          +0 −2
python/paddle/static/amp/bf16/__init__.py   +0 −5
python/paddle/static/amp/bf16/amp_lists.py  +0 −2
python/paddle/static/amp/bf16/amp_utils.py  +0 −8
python/paddle/static/amp/bf16/decorator.py  +0 −2
python/paddle/static/amp/decorator.py       +0 −2
python/paddle/static/amp/fp16_lists.py      +0 −2
python/paddle/static/amp/fp16_utils.py      +0 −2
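All 31 deleted lines below are `__all__` declarations (plus the blank lines around them) in modules under paddle/static/amp. For readers unfamiliar with the mechanism, here is a minimal, self-contained sketch of the two patterns involved; the module and function names are illustrative, not Paddle's:

# ops.py — hypothetical module
__all__ = ["scale"]            # "from ops import *" exposes only `scale`

def scale(x, s):
    return x * s

def _unscale(x, s):            # leading underscore: never star-exported
    return x / s

# __init__.py of a hypothetical package — the aggregation pattern this
# commit removes: re-export every submodule's public names.
from . import ops

__all__ = []
__all__ += ops.__all__         # package-level "import *" now exposes `scale`

Removing `__all__` deletes no functions; it only stops these internal modules from advertising a star-import surface.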
python/paddle/static/amp/__init__.py

@@ -19,9 +19,3 @@ from .fp16_lists import CustomOpLists, AutoMixedPrecisionLists
 from . import fp16_utils
 from .fp16_utils import fp16_guard, cast_model_to_fp16, cast_parameters_to_fp16
 from . import bf16
-from .bf16 import bf16_guard
-
-__all__ = []
-__all__ += decorator.__all__
-__all__ += fp16_lists.__all__
-__all__ += fp16_utils.__all__
python/paddle/static/amp/amp_nn.py

@@ -18,8 +18,6 @@ from paddle.fluid.data_feeder import check_type, check_variable_and_dtype
 from paddle.fluid.framework import Variable, in_dygraph_mode
 from paddle.fluid.layer_helper import LayerHelper
 
-__all__ = ['check_finite_and_unscale', 'update_loss_scaling']
-
 
 def check_finite_and_unscale(x, scale, name=None, float_status=None):
     """
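The hunk keeps the signature of check_finite_and_unscale, one of the two ops this module wraps for AMP dynamic loss scaling. As a rough conceptual analogue (a NumPy sketch for illustration, not Paddle's operator), the op unscales values by `scale` and flags non-finite results:

import numpy as np

def check_finite_and_unscale_sketch(xs, scale):
    # Divide each array by `scale`; report True if any result is inf/NaN.
    # Conceptual analogue of the AMP loss-scaling check, illustration only.
    found_inf = False
    out = []
    for x in xs:
        u = x / scale
        found_inf = found_inf or not np.isfinite(u).all()
        out.append(u)
    return out, found_inf

grads, overflow = check_finite_and_unscale_sketch(
    [np.array([2.0, 4.0])], scale=np.float32(2.0)
)
# grads == [array([1., 2.])], overflow == False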
python/paddle/static/amp/bf16/__init__.py

@@ -24,8 +24,3 @@ from .amp_utils import (
 )
 from . import decorator
 from .decorator import decorate_bf16
-
-__all__ = []
-__all__ += decorator.__all__
-__all__ += amp_lists.__all__
-__all__ += amp_utils.__all__
python/paddle/static/amp/bf16/amp_lists.py

@@ -20,8 +20,6 @@ from ..fp16_lists import black_list as black_list_fp16
 from ..fp16_lists import gray_list as gray_list_fp16
 from ..fp16_lists import white_list as white_list_fp16
 
-__all__ = ["AutoMixedPrecisionListsBF16"]
-
 
 class AutoMixedPrecisionListsBF16:
     """
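AutoMixedPrecisionListsBF16 reuses the fp16 black/gray/white lists imported above. In AMP terms, white-list ops run in reduced precision, black-list ops stay in fp32, and gray-list ops follow the precision of their inputs. A minimal sketch of that classification logic (the op names and the conservative default are assumptions for illustration, not Paddle's actual lists):

# Hypothetical AMP op lists, for illustration only.
white_list = {"matmul", "conv2d"}        # safe/profitable in fp16 or bf16
black_list = {"exp", "log"}              # numerically risky: keep in fp32
gray_list = {"relu", "reshape"}          # inherit precision from inputs

def choose_precision(op_type, inputs_low_precision):
    if op_type in black_list:
        return "fp32"
    if op_type in white_list:
        return "low"
    if op_type in gray_list and inputs_low_precision:
        return "low"
    return "fp32"                        # unknown ops: conservative default

assert choose_precision("matmul", False) == "low"
assert choose_precision("exp", True) == "fp32"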
python/paddle/static/amp/bf16/amp_utils.py

@@ -31,14 +31,6 @@ from ..fp16_utils import (
 )
 from .amp_lists import AutoMixedPrecisionListsBF16
 
-__all__ = [
-    "bf16_guard",
-    "rewrite_program_bf16",
-    "cast_model_to_bf16",
-    "cast_parameters_to_bf16",
-    "convert_float_to_uint16",
-]
-
 _logger = get_logger(
     __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s'
 )
python/paddle/static/amp/bf16/decorator.py

@@ -25,8 +25,6 @@ from .amp_utils import (
     rewrite_program_bf16,
 )
 
-__all__ = ["decorate_bf16"]
-
 
 class OptimizerWithMixedPrecision:
     """
python/paddle/static/amp/decorator.py

@@ -34,8 +34,6 @@ from .fp16_utils import (
     update_role_var_grad,
 )
 
-__all__ = ["decorate"]
-
 
 class OptimizerWithMixedPrecision:
     """
python/paddle/static/amp/fp16_lists.py

@@ -16,8 +16,6 @@ import copy
 from paddle.fluid import core
 
-__all__ = ["CustomOpLists", "AutoMixedPrecisionLists"]
-
 # lookup_table fp16 is slower than fp32, though fp16 is supported.
 _extra_unsupported_fp16_list = {
     'lookup_table',
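The surviving context keeps the comment explaining why lookup_table is listed: it supports fp16 but runs slower than in fp32, so it is excluded for performance rather than correctness. A hedged sketch of how such an extra-exclusions set is typically folded into the effective unsupported set (the merge helper is hypothetical, and the real set continues past the truncation above):

# lookup_table fp16 is slower than fp32, though fp16 is supported.
_extra_unsupported_fp16_list = {
    'lookup_table',
    # ... (remaining entries elided in the page above)
}

def effective_unsupported(device_unsupported):
    # Hypothetical merge: ops unsupported by the device and ops excluded
    # for performance both end up outside the fp16 white list.
    return set(device_unsupported) | _extra_unsupported_fp16_list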
python/paddle/static/amp/fp16_utils.py

@@ -23,8 +23,6 @@ from paddle.fluid.wrapped_decorator import signature_safe_contextmanager
 from .fp16_lists import AutoMixedPrecisionLists
 
-__all__ = ["fp16_guard", "cast_model_to_fp16", "cast_parameters_to_fp16"]
-
 _logger = get_logger(
     __name__, logging.INFO, fmt='%(asctime)s-%(levelname)s: %(message)s'
 )
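Both this file and bf16/amp_utils.py keep the `_logger = get_logger(...)` context line. For readers outside the codebase, here is a stdlib-only sketch of what such a helper conventionally does (Paddle's actual get_logger lives in paddle.fluid.log_helper and may differ in detail):

import logging

def get_logger(name, level, fmt=None):
    # Stdlib-only sketch of the helper used above, illustration only.
    logger = logging.getLogger(name)
    logger.setLevel(level)
    if not logger.handlers:          # avoid duplicate handlers on re-import
        handler = logging.StreamHandler()
        if fmt:
            handler.setFormatter(logging.Formatter(fmt))
        logger.addHandler(handler)
    return logger

_logger = get_logger(__name__, logging.INFO,
                     fmt='%(asctime)s-%(levelname)s: %(message)s')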