BaiXuePrincess / Paddle · forked from PaddlePaddle / Paddle
Commit a0478084
Authored Apr 01, 2019 by minqiyang

Right transformer

Parent: 124f45c9
Showing 1 changed file with 16 additions and 6 deletions (+16 −6)

python/paddle/fluid/tests/unittests/test_imperative_transformer.py  +16 −6
--- a/python/paddle/fluid/tests/unittests/test_imperative_transformer.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_transformer.py
@@ -517,7 +517,7 @@ class DecoderSubLayer(Layer):
         y = self._preprocess_layer(None, input, "n", 0.1)
         slf_attn_output = self._multihead_attention_layer(y, None, None,
                                                           slf_attn_bias)
-        return slf_attn_output
+        return slf_attn_output, y


 class TestDygraphTransformer(unittest.TestCase):
@@ -536,7 +536,7 @@ class TestDygraphTransformer(unittest.TestCase):
             dy_param_init = dict()
             dy_param_updated = dict()
             for i in range(batch_num):
-                loss = transformer(to_variable(x1), to_variable(x2))
+                loss, y = transformer(to_variable(x1), to_variable(x2))
                 loss = fluid.layers.reduce_sum(loss)
                 print('dy los', loss.shape)
                 if i == 0:
@@ -545,6 +545,7 @@ class TestDygraphTransformer(unittest.TestCase):
                 loss._backward()
                 optimizer.minimize(loss)
+                dy_key_value = y._gradient()
                 transformer.clear_gradients()
                 if i == batch_num - 1:
                     for param in transformer.parameters():
@@ -563,7 +564,7 @@ class TestDygraphTransformer(unittest.TestCase):
             data1 = fluid.layers.data(name='X', shape=[4, 512], dtype='float32')
             data2 = fluid.layers.data(name='Y', shape=[8, 4, 4], dtype='float32')
-            loss = transformer(data1, data2)
+            loss, y = transformer(data1, data2)
             loss = fluid.layers.reduce_sum(loss)
             print('loss hspae', loss.shape)
@@ -580,24 +581,33 @@ class TestDygraphTransformer(unittest.TestCase):
             for i in range(len(static_param_name_list)):
                 static_param_init[static_param_name_list[i]] = out[i]
+            print(fluid.default_main_program())
             for i in range(batch_num):
                 feed_dict = {"X": x1, "Y": x2}
-                fetch_list = []
+                fetch_list = [
+                    "transformer/DecoderSubLayer_0/PrePostProcessLayer_0/LayerNorm_0.tmp_2@GRAD"
+                ]
                 fetch_list.extend(static_param_name_list)
                 out = exe.run(fluid.default_main_program(),
                               feed=feed_dict,
                               fetch_list=fetch_list)
                 if i == batch_num - 1:
-                    for k in range(0, len(out)):
+                    static_key_value = out[0]
+                    for k in range(1, len(out)):
                         static_param_updated[static_param_name_list[k -
-                                                                    0]] = out[k]
+                                                                    1]] = out[k]

         for key, value in six.iteritems(static_param_init):
             self.assertTrue(np.array_equal(value, dy_param_init[key]))
         for key, value in six.iteritems(static_param_updated):
+            if not (value == dy_param_updated[key]).all():
+                print(key)
+            if not np.array_equal(dy_key_value, static_key_value):
+                print("xxx", dy_key_value, static_key_value)
+                print("yyy")
+                print(dy_key_value - static_key_value)
+                print(np.where(dy_key_value - static_key_value))


 if __name__ == '__main__':
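For context only, and not part of the commit: a minimal sketch of how the dygraph-vs-static intermediate-gradient comparison exercised by the debug prints above could be expressed as a strict check. The helper name assert_gradients_match and the tolerance are assumptions; it only presumes that the dygraph y._gradient() result and the fetched "...LayerNorm_0.tmp_2@GRAD" value from exe.run() are NumPy arrays of the same shape.

import numpy as np


def assert_gradients_match(dy_key_value, static_key_value, atol=1e-5):
    # dy_key_value:     gradient of the intermediate tensor in dygraph mode,
    #                   e.g. the value returned by y._gradient()
    # static_key_value: the matching @GRAD variable fetched from the static
    #                   executor's fetch_list
    assert dy_key_value.shape == static_key_value.shape, "shape mismatch"
    if not np.allclose(dy_key_value, static_key_value, atol=atol):
        diff = np.abs(dy_key_value - static_key_value)
        # Report the offending positions and the largest deviation instead of
        # printing the raw arrays.
        raise AssertionError("gradient mismatch at %s, max diff %.3e" %
                             (np.where(diff > atol), diff.max()))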