BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit b0c75f17
Authored Jan 30, 2019 by xuezhong

remove debug print

Parent 88083632
Showing 2 changed files with 0 additions and 81 deletions (+0 -81)

  paddle/fluid/operators/lstmp_op.h                      +0 -1
  python/paddle/fluid/tests/unittests/test_lstmp_op.py   +0 -80
paddle/fluid/operators/lstmp_op.h

@@ -225,7 +225,6 @@ class LSTMPKernel : public framework::OpKernel<T> {
       // Since the batch computing for LSTMP reorders the input sequence
       // according to their length. The initialized hidden state also needs
       // to reorder.
-      VLOG(1) << "qxz h0 used";
       ReorderInitState<DeviceContext, T>(device_ctx, *hidden_t0, order,
                                          &ordered_h0, true);
       blas.MatMul(ordered_h0, false, *weight, false, static_cast<T>(1.0),
python/paddle/fluid/tests/unittests/test_lstmp_op.py

@@ -44,7 +44,6 @@ def lstmp(
           act_proj=None):
     def _step(x, w_r, w_rh, w_c, r_pre, c_pre, proj_clip, cell_clip, act_gate,
               act_cell, act_cand, act_proj):
-        #import pdb; pdb.set_trace()
         g = np.dot(r_pre, w_r)  # 1 x 4D
         g = g + x
         g = np.reshape(g, (1, g.size))
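A brief note on the shapes in the _step body above: r_pre is the previous projected hidden state (1 x P), w_r appears to be the recurrent weight (P x 4D), and x is the current timestep's input already mapped into gate space, so g holds the four gate pre-activations (1 x 4D, as the inline comment says). A minimal shape check, with illustrative sizes chosen here rather than taken from the test:

import numpy as np

# Illustrative sizes only: P = projection size, D = hidden/cell size.
P, D = 2, 3
r_pre = np.random.rand(1, P)     # previous projected hidden state, 1 x P
w_r = np.random.rand(P, 4 * D)   # recurrent weight, P x 4D (assumed layout)
x_t = np.random.rand(1, 4 * D)   # current input row, already in gate space

g = np.dot(r_pre, w_r) + x_t     # gate pre-activations
g = np.reshape(g, (1, g.size))
assert g.shape == (1, 4 * D)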
@@ -59,9 +58,6 @@ def lstmp(
         c = g_f * c_pre + g_i * act_cand(c)  # 1 x D

         def array_clip(a, clip):
-            #print('clip:{}'.format(clip))
-            #print('old' + str(a))
             size = np.prod(a.shape)
             new_a = np.reshape(a, (size))
             for i in range(size):
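The loop body of array_clip is collapsed in this view; judging from its name and the removed debug prints, it clamps each element of a into [-clip, clip]. Under that assumption, a vectorised equivalent is simply np.clip:

import numpy as np

def array_clip(a, clip):
    # Assumed behaviour of the helper above: elementwise clamp to [-clip, clip].
    return np.clip(a, -clip, clip)

# With the clip value 0.1 used later in setUp:
print(array_clip(np.array([-0.25, 0.03, 0.4]), 0.1))  # each element lands in [-0.1, 0.1]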
@@ -134,92 +130,17 @@ class TestLstmpOp(LstmTest.TestLstmOp):
     def reset_argument(self):
         pass

-    def setUp2(self):
-        self.set_argument()
-        # projection size
-        self.P = 2
-        self.reset_argument()
-        self.op_type = 'lstmp'
-        self.act_proj = 'identity'
-        self.use_peepholes = False
-        self.has_initial_state = True
-        self.lod = [[5]]
-        T = sum(self.lod[0])
-        N = len(self.lod[0])
-        proj_clip = 0.5
-        cell_clip = 0.0
-        #import pdb; pdb.set_trace()
-        x = np.array([[-0.50806344, 0.50909436], \
-                      [-0.50087136, 0.4904187], \
-                      [-0.48933774, 0.50408053], \
-                      [0.00896523, 0.00770854], \
-                      [-0.00851139, -0.01005108]])
-        wx = np.array([[0.2932311, -0.8829277, 1.100133, 0.8197811, -0.8194872,
-                        -0.829262, 0.7708865, -0.62339246, -0.7656475, 0.4283645,
-                        -0.27164033, -0.3600223], \
-                       [-0.609142, 0.25025278, 0.15731744, -0.66051376, -0.70994514,
-                        0.8344964, -0.00551117, -0.7072167, -0.63929003, -0.52340907,
-                        -0.8842589, 0.9531688]])
-        x = np.dot(x, wx)
-        w = np.array([[0.7808204, -0.7412322, -0.9458036, -0.01664658, 0.7930616,
-                       0.10208707, 0.20036687, -0.16743736, 1.0295134, -0.3118722,
-                       0.02241168, 0.3154219], \
-                      [-0.29026014, 0.24638331, -0.5435432, 0.87635124, -0.96091515,
-                       -0.1411362, 0.58606523, -0.38996056, -0.9003789, 0.8540163,
-                       -0.8831781, -0.28499633]])
-        w_rh = np.array([[0.15685119, 0.05694652], [-0.9641068, -1.5106804],
-                         [0.3599193, 1.2540514]])
-        w_b = np.array([[-0.49999997, 0.5, -0.49999997, -0.5, 0.5, 0.5,
-                         0.49999997, -0.49999997, 0.49999997, -0.5, 0.49999997,
-                         0.5]])
-        h0 = np.array([[-1.3392334e-04, -6.8468950e-04]])
-        c0 = np.array([[4.5552300e-04, 1.3302206e-03, -3.6721351e-04]])
-        w_c = None
-        self.lod = [[5]]
-        #import pdb; pdb.set_trace()
-        r, c = lstmp(x, self.lod, h0, c0, w, w_rh, w_b, w_c, self.is_reverse,
-                     proj_clip, cell_clip, ACTIVATION[self.act_gate],
-                     ACTIVATION[self.act_cell], ACTIVATION[self.act_cand],
-                     ACTIVATION[self.act_proj])
-        self.inputs = {'Input': (x, self.lod), 'Weight': w, 'ProjWeight': w_rh}
-        self.inputs['Bias'] = w_b
-        if self.has_initial_state:
-            self.inputs['H0'] = h0
-            self.inputs['C0'] = c0
-        self.outputs = {
-            'Projection': (r, self.lod),
-            'Cell': (c, self.lod),
-        }
-        self.attrs = {
-            'use_peepholes': self.use_peepholes,
-            'is_reverse': self.is_reverse,
-            'proj_clip': proj_clip,
-            'cell_clip': cell_clip,
-            'gate_activation': self.act_gate,
-            'cell_activation': self.act_cell,
-            'candidate_activation': self.act_cand,
-            'proj_activation': self.act_proj
-        }

     def setUp(self):
         self.set_argument()
         # projection size
         self.P = 10
-        #self.D = 9
         self.act_proj = self.act_cell

         self.reset_argument()
         self.op_type = 'lstmp'
-        #self.use_peepholes=False
-        #self.lod=[[7]]
-        #self.act_proj='identity'
-        #self.act_proj='tanh'

         T = sum(self.lod[0])
         N = len(self.lod[0])
-        #np.random.seed=123
         x = np.random.normal(size=(T, 4 * self.D)).astype('float64')
         if self.has_initial_state:
             h0 = np.random.normal(size=(N, self.P)).astype('float64')
@@ -238,7 +159,6 @@ class TestLstmpOp(LstmTest.TestLstmOp):
         w_rh = np.random.normal(size=(self.D, self.P)).astype('float64')
         proj_clip = 0.1
         cell_clip = 0.1
-        #import pdb; pdb.set_trace()
         r, c = lstmp(x, self.lod, h0, c0, w, w_rh, w_b, w_c, self.is_reverse,
                      proj_clip, cell_clip, ACTIVATION[self.act_gate],
                      ACTIVATION[self.act_cell], ACTIVATION[self.act_cand],