BaiXuePrincess / Paddle
Forked from PaddlePaddle / Paddle (in sync with the fork source)
Commit 29a13edd
Authored on Feb 12, 2023 by wangruting
Parent: 296b64ac

    success test_forward
Showing 3 changed files with 46 additions and 31 deletions:

+42  -31  python/paddle/fluid/tests/unittests/prim/composite_ops/test_composite_layer_norm.py
 +2   -0  python/paddle/incubate/autograd/composite_rules.py
 +2   -0  python/paddle/incubate/autograd/primitives.py
python/paddle/fluid/tests/unittests/prim/composite_ops/test_composite_layer_norm.py
@@ -65,31 +65,6 @@ def fn(x, norm_shape, w, b):
     return F.layer_norm(x, norm_shape, w, b)
 
 
-def layer_norm_(input, weight, bias, epsilon=1e-05, begin_norm_axis=0):
-    axis = np.arange(begin_norm_axis, len(input.shape))
-    mean = paddle.mean(input, axis=axis, keepdim=True)
-    t1 = input - mean
-    t2 = paddle.pow(t1, 2.0)
-    t3 = paddle.mean(t2, axis=axis, keepdim=True)
-    t4 = t3 + epsilon
-    t5 = paddle.sqrt(t4)
-    t7 = t1 / t5
-    out = t7
-    if weight is not None:
-        weight = paddle.reshape(weight, input.shape[begin_norm_axis:])
-        out = t7 * paddle.broadcast_to(weight, out.shape)
-    if bias is not None:
-        bias = paddle.reshape(bias, input.shape[begin_norm_axis:])
-        out = out + paddle.broadcast_to(bias, out.shape)
-    return out
-
-
-def composite_forward(x, norm_shape, w, b):
-    b_axis = len(x.shape) - len(norm_shape)
-    return layer_norm_(x, w, b, begin_norm_axis=b_axis)
-
-
 def expect_forward(x, norm_shape, w, b):
     return fn(x, norm_shape, w, b)
@@ -97,10 +72,10 @@ def expect_forward(x, norm_shape, w, b):
 class TestCompositelayer_norm(unittest.TestCase):
     def setUp(self):
         self.dtypes = ["float16", "float32"]
-        self.n_shape = [[3, 4], [3], [2, 3]]
+        self.n_shape = [[4], [3], [2, 3]]
         self.shape1s = [[3, 4], [2, 4, 3], [2, 2, 3]]
-        self.shape2s = [[12], [3], [6]]
-        self.shape3s = [[12], [3], [6]]
+        self.shape2s = [[4], [3], [6]]
+        self.shape3s = [[4], [3], [6]]
 
     def cal_composite(self, inputs, norm_shape, weight, bias):
         paddle.enable_static()
@@ -144,6 +119,43 @@ class TestCompositelayer_norm(unittest.TestCase):
         core._set_prim_forward_enabled(False)
         return res
 
+    def cal2_composite(self, inputs, norm_shape, weight, bias):
+        paddle.enable_static()
+        core._set_prim_forward_enabled(True)
+        startup_program = paddle.static.Program()
+        main_program = paddle.static.Program()
+        with paddle.static.program_guard(main_program, startup_program):
+            x = paddle.static.data(
+                'x', shape=inputs.shape, dtype=str(inputs.dtype)
+            )
+            y = fn(x, norm_shape, weight, bias)
+            blocks = main_program.blocks
+
+            fwd_ops = [op.type for op in blocks[0].ops]
+            # Ensure that layer_norm is in the original block
+            self.assertTrue('layer_norm' in fwd_ops)
+
+            paddle.incubate.autograd.to_prim(blocks)
+
+            fwd_ops_new = [op.type for op in blocks[0].ops]
+            # Ensure that layer_norm is split into small ops
+            self.assertTrue('layer_norm' not in fwd_ops_new)
+
+        exe = paddle.static.Executor()
+        exe.run(startup_program)
+        res = exe.run(
+            main_program,
+            feed={'x': inputs},
+            fetch_list=[y],
+        )
+        paddle.disable_static()
+        core._set_prim_forward_enabled(False)
+        return res
+
     def compare_forward(self):
         x, w, b = generate_data(attrs.shape1, attrs.shape2, attrs.shape3)
         n_shape = attrs.n_shape
@@ -152,8 +164,7 @@ class TestCompositelayer_norm(unittest.TestCase):
         b_p = paddle.to_tensor(b)
 
         expect = expect_forward(x_p, n_shape, w_p, b_p).numpy()
-        # actual = self.cal_composite(x_p, n_shape, w_p, b_p)
-        actual = composite_forward(x_p, n_shape, w_p, b_p).numpy()
+        actual = self.cal_composite(x, n_shape, w, b)[0]
 
         assert expect.dtype == actual.dtype
         np.testing.assert_allclose(
@@ -164,7 +175,7 @@ class TestCompositelayer_norm(unittest.TestCase):
         )
 
         expect_2 = expect_forward(x_p, n_shape, None, None).numpy()
-        actual_2 = composite_forward(x_p, n_shape, None, None).numpy()
+        actual_2 = self.cal2_composite(x, n_shape, None, None)[0]
 
         assert expect_2.dtype == actual_2.dtype
         np.testing.assert_allclose(
             expect_2,
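For readers outside the Paddle codebase: the decomposition these tests exercise is standard layer normalization. Below is a minimal NumPy reference sketch of what the composite rule computes (my own illustration, not part of the patch; all names are hypothetical): subtract the mean over the trailing axes, divide by sqrt(variance + epsilon), then apply the optional scale and shift.

    import numpy as np

    def layer_norm_ref(x, weight=None, bias=None, epsilon=1e-5, begin_norm_axis=0):
        # Normalize over the trailing axes, then apply optional scale/shift.
        axis = tuple(range(begin_norm_axis, x.ndim))
        mean = x.mean(axis=axis, keepdims=True)
        var = ((x - mean) ** 2).mean(axis=axis, keepdims=True)
        out = (x - mean) / np.sqrt(var + epsilon)
        norm_shape = x.shape[begin_norm_axis:]
        if weight is not None:
            out = out * weight.reshape(norm_shape)
        if bias is not None:
            out = out + bias.reshape(norm_shape)
        return out

    x = np.random.rand(2, 4, 3).astype("float32")
    w = np.ones(12, dtype="float32")   # 12 == 4 * 3, the normalized size
    b = np.zeros(12, dtype="float32")
    assert layer_norm_ref(x, w, b, begin_norm_axis=1).shape == (2, 4, 3)

Note that, as in the test's setUp, the 1-D weight and bias are reshaped to the normalized shape before broadcasting.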
python/paddle/incubate/autograd/composite_rules.py
@@ -121,4 +121,6 @@ def layernorm_composite(x, scale, bias, epsilon, begin_norm_axis):
         bias = reshape(bias, x.shape[begin_norm_axis:])
         out = out + broadcast_to(bias, out.shape)
 
+    mean_ = flatten(mean_)
+    variance = flatten(variance)
     return out, mean_, variance
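A small shape check behind the two flatten() calls (my own sketch and my reading of the change, not stated in the patch): the composite rule reduces with keepdim, while the original layer_norm op is expected to emit 1-D per-row statistics, so the composite outputs must be flattened to match.

    import numpy as np

    x = np.random.rand(2, 4, 3).astype("float32")
    begin_norm_axis = 1
    axis = tuple(range(begin_norm_axis, x.ndim))

    mean_ = x.mean(axis=axis, keepdims=True)    # shape (2, 1, 1)
    variance = x.var(axis=axis, keepdims=True)  # shape (2, 1, 1)

    # Flattening yields the 1-D shape assumed for the original op's outputs.
    assert mean_.reshape(-1).shape == (2,)
    assert variance.reshape(-1).shape == (2,)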
python/paddle/incubate/autograd/primitives.py
@@ -34,6 +34,7 @@ from paddle.tensor import erf  # noqa: F401
 from paddle.tensor import erfinv  # noqa: F401
 from paddle.tensor import exp  # noqa: F401
 from paddle.tensor import expm1  # noqa: F401
+from paddle.tensor import flatten  # noqa: F401
 from paddle.tensor import lgamma  # noqa: F401
 from paddle.tensor import log  # noqa: F401
 from paddle.tensor import log1p  # noqa: F401
@@ -113,6 +114,7 @@ others = [
     'assign',
     'fill_constant',
     'reshape',
+    'flatten',
 ]
 
 __all__ = []
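With these two lines, flatten is re-exported alongside the other primitive ops and listed in the `others` whitelist, so a decomposed program may contain it. A small availability sketch (assuming a Paddle build with this patch applied; the import path mirrors the file's location and is my assumption):

    import paddle
    from paddle.incubate.autograd import primitives

    # flatten now sits next to reshape, broadcast_to, etc. among the
    # ops that composite rules such as layernorm_composite may call.
    t = primitives.flatten(paddle.ones([2, 1, 1]))
    print(t.shape)  # [2]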