Commit b027652b (unverified)
remove tmp fp32 var for gaussian_random (#46285)
Authored by Guoxia Wang on Sep 21, 2022; committed by GitHub on Sep 21, 2022.
Parent: 3e8b3220
This commit removes the temporary fp32 variable (and the trailing cast op) that NormalInitializer created for fp16/bf16 parameters; gaussian_random now writes directly into the parameter in its own dtype.

Showing 3 changed files with 9 additions and 43 deletions (+9 -43):

python/paddle/fluid/initializer.py                            +7 -39
python/paddle/fluid/tests/unittests/test_initializer.py      +1 -3
python/paddle/fluid/tests/unittests/test_initializer_nn.py   +1 -1
python/paddle/fluid/initializer.py
@@ -374,19 +374,6 @@ class NormalInitializer(Initializer):
                                  ["uint16", "float16", "float32", "float64"],
                                  "guassian_random")
 
-        # to be compatible of fp16 initalizers
-        if var.dtype in [VarDesc.VarType.FP16, VarDesc.VarType.BF16]:
-            out_dtype = VarDesc.VarType.FP32
-            out_var = block.create_var(name=unique_name.generate(".".join(
-                ['normal_init', var.name, 'tmp'])),
-                                       shape=var.shape,
-                                       dtype=out_dtype,
-                                       type=VarDesc.VarType.LOD_TENSOR,
-                                       persistable=False)
-        else:
-            out_dtype = var.dtype
-            out_var = var
-
         if self._seed == 0:
             self._seed = block.program.random_seed
@@ -394,48 +381,29 @@ class NormalInitializer(Initializer):
             place = _current_expected_place()
             out_var = _C_ops.gaussian_random(var.shape, self._mean,
                                              self._std_dev, self._seed,
-                                             out_dtype, place)
-
-            if var.dtype in [VarDesc.VarType.FP16, VarDesc.VarType.BF16]:
-                var_tmp = _C_ops.cast(out_var, var.dtype)
-                var_tmp._share_underline_tensor_to(var)
-            else:
-                out_var._share_underline_tensor_to(var)
+                                             var.dtype, place)
+            out_var._share_underline_tensor_to(var)
             return None
 
         if _in_legacy_dygraph():
             out_var = _legacy_C_ops.gaussian_random(
-                'shape', var.shape, 'dtype', out_dtype, 'mean', self._mean,
+                'shape', var.shape, 'dtype', var.dtype, 'mean', self._mean,
                 'std', self._std_dev, 'seed', self._seed, 'use_mkldnn', False)
-
-            if var.dtype in [VarDesc.VarType.FP16, VarDesc.VarType.BF16]:
-                var_tmp = _legacy_C_ops.cast(out_var, 'in_dtype', out_var.dtype,
-                                             'out_dtype', var.dtype)
-                var_tmp._share_underline_tensor_to(var)
-            else:
-                out_var._share_underline_tensor_to(var)
+            out_var._share_underline_tensor_to(var)
             return None
         else:
             op = block.append_op(type="gaussian_random",
-                                 outputs={"Out": out_var},
+                                 outputs={"Out": var},
                                  attrs={
                                      "shape": var.shape,
-                                     "dtype": out_dtype,
+                                     "dtype": var.dtype,
                                      "mean": self._mean,
                                      "std": self._std_dev,
                                      "seed": self._seed,
                                      "use_mkldnn": False
                                  },
                                  stop_gradient=True)
-
-            if var.dtype in [VarDesc.VarType.FP16, VarDesc.VarType.BF16]:
-                block.append_op(type="cast",
-                                inputs={"X": out_var},
-                                outputs={"Out": var},
-                                attrs={
-                                    "in_dtype": out_var.dtype,
-                                    "out_dtype": var.dtype
-                                })
             var.op = op
             return op
@@ -695,7 +663,7 @@ class XavierInitializer(Initializer):
                                  outputs={"Out": out_var},
                                  attrs={
                                      "shape": out_var.shape,
-                                     "dtype": out_dtype,
+                                     "dtype": out_var.dtype,
                                      "mean": 0.0,
                                      "std": std,
                                      "seed": self._seed
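The effect is easiest to see on the static-graph branch: initializing a float16 or bfloat16 parameter with NormalInitializer now appends a single gaussian_random op that writes into the parameter in its own dtype, instead of filling a temporary 'normal_init...tmp' fp32 variable and then casting. A minimal sketch mirroring the updated unit tests below; the fluid imports and the enable_static() call are assumptions of the sketch, not part of this commit:

import paddle
from paddle.fluid import framework, initializer

paddle.enable_static()
program = framework.Program()
block = program.global_block()
block.create_parameter(dtype="float16",
                       shape=[5, 10],
                       lod_level=0,
                       name="param",
                       initializer=initializer.NormalInitializer(2.3, 1.9, 123))
# After this commit the block holds exactly one op; before it, an fp16
# parameter produced two (gaussian_random into an fp32 temporary, then cast).
assert len(block.ops) == 1
assert block.ops[0].type == "gaussian_random"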
python/paddle/fluid/tests/unittests/test_initializer.py
@@ -245,7 +245,7 @@ class TestNormalInitializer(unittest.TestCase):
                                    name="param",
                                    initializer=initializer.NormalInitializer(
                                        2.3, 1.9, 123))
-        num_ops = 2 if (dtype == "float16" or dtype == "uint16") else 1
+        num_ops = 1
         self.assertEqual(len(block.ops), num_ops)
         init_op = block.ops[0]
         self.assertEqual(init_op.type, 'gaussian_random')
@@ -390,7 +390,6 @@ class TestXavierInitializer(unittest.TestCase):
         """Test the Xavier initializer with float16
         """
         block = self.test_xavier_initializer_supplied_arguments("float16")
-        self.assertTrue(check_cast_op(block.ops[1]))
 
     def test_xavier_initializer_bf16(self):
         """Test the Xavier initializer with bfloat16
@@ -400,7 +399,6 @@ class TestXavierInitializer(unittest.TestCase):
         self.assertEqual(len(block_uniform.ops), 1)
         block_gaussian = self.test_xavier_initializer_supplied_arguments(
             "uint16", False)
-        self.assertTrue(check_cast_op(block_gaussian.ops[1]))
 
 
 class TestMSRAInitializer(unittest.TestCase):
python/paddle/fluid/tests/unittests/test_initializer_nn.py
@@ -398,7 +398,7 @@ class TestNormal(unittest.TestCase):
                              lod_level=0,
                              name="param",
                              initializer=initializer.Normal(2.3, 1.9))
-        num_ops = 2 if dtype in ["float16", "uint16"] else 1
+        num_ops = 1
         self.assertEqual(len(block.ops), num_ops)
         init_op = block.ops[0]
         self.assertEqual(init_op.type, 'gaussian_random')
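The dynamic-graph branches change the same way: _C_ops.gaussian_random (and the legacy op) now produce the tensor in the parameter's own dtype and share it into the parameter directly, with no intermediate fp32 tensor or cast. A minimal dygraph sketch, assuming the gaussian_random kernel on the current device supports float16; paddle.create_parameter and paddle.nn.initializer.Normal are merely a convenient way to reach this code path and are not touched by this commit:

import paddle

paddle.disable_static()
# A float16 parameter filled by the Normal initializer is now generated
# directly in float16 by gaussian_random.
w = paddle.create_parameter(
    shape=[5, 10],
    dtype="float16",
    default_initializer=paddle.nn.initializer.Normal(mean=2.3, std=1.9))
print(w.dtype)  # paddle.float16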