Unverified commit 01aa670c in 机器未来 / Paddle (forked from PaddlePaddle / Paddle)
Authored by Hongyu Liu on Apr 17, 2019; committed via GitHub on Apr 17, 2019
Merge pull request #16933 from phlrain/pick_many_infer_fix
Pick many infer fix
Parents: 591f0879, dc202c25
Showing 9 changed files with 98 additions and 56 deletions (+98, -56):
paddle/fluid/operators/attention_lstm_op.cc  (+12, -4)
paddle/fluid/operators/bpr_loss_op.cc  (+8, -4)
paddle/fluid/operators/conv_shift_op.cc  (+11, -8)
paddle/fluid/operators/merge_lod_tensor_op.cc  (+3, -1)
paddle/fluid/operators/positive_negative_pair_op.cc  (+24, -16)
paddle/fluid/operators/scatter_op.cc  (+0, -4)
paddle/fluid/operators/split_lod_tensor_op.cc  (+3, -1)
paddle/fluid/operators/sum_op.cc  (+15, -1)
paddle/fluid/operators/teacher_student_sigmoid_loss_op.cc  (+22, -17)
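The nine diffs below are variations of a single pattern: shape checks in each operator's InferShape that compare concrete dimension values no longer run unconditionally. They are wrapped so that they fire at runtime (ctx->IsRuntime()) and, in some of the files, at compile time only when the dimensions involved are already fully known (framework::product(dims) > 0); otherwise compile-time inference would trip over placeholder dimensions such as -1. Below is a minimal, self-contained sketch of the idea; Dims, Ctx, Product, and CheckSameShape are hypothetical stand-ins for illustration, not Paddle's real framework types.

#include <cassert>
#include <functional>
#include <numeric>
#include <vector>

// Hypothetical stand-in for a shape: a list of dims, where -1 (or 0) marks a
// dimension that is not known until runtime.
using Dims = std::vector<long>;

// Plays the role of framework::product(): the product of all dims.
// It is <= 0 whenever any dimension is still an unknown placeholder.
long Product(const Dims& d) {
  return std::accumulate(d.begin(), d.end(), 1L, std::multiplies<long>());
}

// Hypothetical stand-in for InferShapeContext, reduced to the runtime flag.
struct Ctx {
  bool is_runtime;
  bool IsRuntime() const { return is_runtime; }
};

// The recurring pattern in this commit: enforce the exact-shape check at
// runtime, or at compile time only when both shapes are already fully known.
void CheckSameShape(const Ctx& ctx, const Dims& a, const Dims& b) {
  if (ctx.IsRuntime() || (Product(a) > 0 && Product(b) > 0)) {
    assert(a == b && "The two inputs should have the same shape.");
  }
}

int main() {
  Dims known{4, 8}, partly_unknown{-1, 8};
  CheckSameShape(Ctx{false}, known, partly_unknown);  // compile time: skipped
  CheckSameShape(Ctx{true}, known, known);            // runtime: enforced, passes
  return 0;
}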
paddle/fluid/operators/attention_lstm_op.cc

@@ -64,12 +64,19 @@ void AttentionLSTMOp::InferShape(framework::InferShapeContext* ctx) const {
   auto c_dims = ctx->GetInputDim("C0");
   PADDLE_ENFORCE_EQ(c_dims.size(), 2, "Input(C0)'s rank must be 2.");
-  PADDLE_ENFORCE_EQ(c_dims[1], D, "C0 dims should be N x %d.", D);
+  if (ctx->IsRuntime()) {
+    PADDLE_ENFORCE_EQ(c_dims[1], D, "C0 dims should be N x %d.", D);
+  }
   if (ctx->HasInput("H0")) {
     auto h_dims = ctx->GetInputDim("H0");
-    PADDLE_ENFORCE(h_dims == c_dims,
-                   "The dimension of Input(H0) and Input(C0) "
-                   "should be the same.");
+    PADDLE_ENFORCE_EQ(h_dims.size(), 2UL, "Input(H0)'s rank must be 2.");
+    if (ctx->IsRuntime() ||
+        (framework::product(c_dims) > 0 && framework::product(h_dims) > 0)) {
+      PADDLE_ENFORCE(h_dims == c_dims,
+                     "The dimension of Input(H0) and Input(C0) "
+                     "should be the same.");
+    }
   }
   auto atten_w_dims = ctx->GetInputDim("AttentionWeight");

@@ -79,6 +86,7 @@ void AttentionLSTMOp::InferShape(framework::InferShapeContext* ctx) const {
                     "AttentionWeight shapes must be (%d + %d) * 1.", M, D);
   PADDLE_ENFORCE_EQ(atten_w_dims[1], 1,
                     "AttentionWeight shapes must be (%d + %d) * 1.", M, D);
   if (ctx->HasInput("AttentionBias")) {
     auto atten_b_dims = ctx->GetInputDim("AttentionBias");
     PADDLE_ENFORCE_EQ(atten_b_dims.size(), 2, ...
paddle/fluid/operators/bpr_loss_op.cc

@@ -32,10 +32,14 @@ class BprLossOp : public framework::OperatorWithKernel {
     int rank = x_dims.size();
     PADDLE_ENFORCE_EQ(rank, label_dims.size(),
                       "Input(X) and Input(Label) shall have the same rank.");
-    PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
-                      framework::slice_ddim(label_dims, 0, rank - 1),
-                      "Input(X) and Input(Label) shall have the same shape "
-                      "except the last dimension.");
+    if (ctx->IsRuntime() ||
+        (framework::product(x_dims) > 0 &&
+         framework::product(label_dims) > 0)) {
+      PADDLE_ENFORCE_EQ(framework::slice_ddim(x_dims, 0, rank - 1),
+                        framework::slice_ddim(label_dims, 0, rank - 1),
+                        "Input(X) and Input(Label) shall have the same shape "
+                        "except the last dimension.");
+    }
     auto y_dims = x_dims;
     y_dims[rank - 1] = 1;
paddle/fluid/operators/conv_shift_op.cc

@@ -36,14 +36,17 @@ class ConvShiftOp : public framework::OperatorWithKernel {
     auto y_dims = ctx->GetInputDim("Y");
     PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(X)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(y_dims.size(), 2, "Input(Y)'s rank should be 2.");
-    PADDLE_ENFORCE_EQ(x_dims[0], y_dims[0],
-                      "The 1st dimension of Input(X) and Input(Y) should "
-                      "be equal.");
-    PADDLE_ENFORCE_EQ(y_dims[1] % 2, 1,
-                      "The 2nd dimension of Input(Y) should be odd.");
-    PADDLE_ENFORCE_LE(y_dims[1], x_dims[1],
-                      "The 2nd dimension of Input(Y) should be less than or "
-                      "equal to the 2nd dimension of Input(X).");
+    if (ctx->IsRuntime() || (x_dims[0] > 0 && y_dims[0] > 0))
+      PADDLE_ENFORCE_EQ(x_dims[0], y_dims[0],
+                        "The 1st dimension of Input(X) and Input(Y) should "
+                        "be equal.");
+    if (ctx->IsRuntime() || y_dims[1] > 0)
+      PADDLE_ENFORCE_EQ(y_dims[1] % 2, 1,
+                        "The 2nd dimension of Input(Y) should be odd.");
+    if (ctx->IsRuntime() || (x_dims[1] > 0 && y_dims[1] > 0))
+      PADDLE_ENFORCE_LE(y_dims[1], x_dims[1],
+                        "The 2nd dimension of Input(Y) should be less than or "
+                        "equal to the 2nd dimension of Input(X).");
     ctx->ShareDim("X", /*->*/ "Out");
     ctx->ShareLoD("X", /*->*/ "Out");
   }
paddle/fluid/operators/merge_lod_tensor_op.cc

@@ -164,7 +164,9 @@ class MergeLoDTensorInferShape : public framework::InferShapeBase {
     auto mask_dim = context->GetInputDim("Mask");
     PADDLE_ENFORCE_EQ(mask_dim.size(), 2);
-    PADDLE_ENFORCE_EQ(mask_dim[1], 1);
+    if (context->IsRuntime() || mask_dim[1] > 0) {
+      PADDLE_ENFORCE_EQ(mask_dim[1], 1);
+    }
     context->SetOutputDim("Out", context->GetInputDim("InTrue"));
   }
paddle/fluid/operators/positive_negative_pair_op.cc

@@ -61,23 +61,31 @@ class PositiveNegativePairOp : public framework::OperatorWithKernel {
     auto query_dim = ctx->GetInputDim("QueryID");
     PADDLE_ENFORCE_EQ(score_dim.size(), 2, "Score should be a 2-D tensor.");
     PADDLE_ENFORCE_EQ(label_dim.size(), 2, "Label should be a 2-D tensor.");
-    PADDLE_ENFORCE_EQ(
-        label_dim[0], score_dim[0],
-        "Tensor Score and Label should have the same height (batch size).");
-    PADDLE_ENFORCE_EQ(label_dim[1], 1,
-                      "The width of Label should be 1, i.e. each item should "
-                      "have a scalar label.");
-    PADDLE_ENFORCE(query_dim == label_dim,
-                   "QueryID should have the same shape as Label.");
-    if (ctx->HasInput("Weight")) {
-      PADDLE_ENFORCE(ctx->GetInputDim("Weight") == label_dim,
-                     "Weight should have the same shape as Label.");
-    }
-    int column = ctx->Attrs().Get<int>("column");
-    auto depth = score_dim[1];
-    PADDLE_ENFORCE(column < depth && column >= -depth,
-                   "Attribute column should be in the range of [-%l, %l)",
-                   depth, depth);
+    if (ctx->IsRuntime() ||
+        (score_dim[0] > 0 && label_dim[0] > 0 && query_dim[0] > 0)) {
+      PADDLE_ENFORCE_EQ(
+          label_dim[0], score_dim[0],
+          "Tensor Score and Label should have the same height (batch size).");
+      PADDLE_ENFORCE_EQ(label_dim[1], 1,
+                        "The width of Label should be 1, i.e. each item should "
+                        "have a scalar label.");
+      PADDLE_ENFORCE(query_dim == label_dim,
+                     "QueryID should have the same shape as Label.");
+      if (ctx->HasInput("Weight")) {
+        PADDLE_ENFORCE(ctx->GetInputDim("Weight") == label_dim,
+                       "Weight should have the same shape as Label.");
+      }
+      int column = ctx->Attrs().Get<int>("column");
+      auto depth = score_dim[1];
+      PADDLE_ENFORCE(column < depth && column >= -depth,
+                     "Attribute column should be in the range of [-%l, %l)",
+                     depth, depth);
+    }
     ctx->SetOutputDim("PositivePair", scalar_dim);
     ctx->SetOutputDim("NegativePair", scalar_dim);
paddle/fluid/operators/scatter_op.cc

@@ -42,10 +42,6 @@ class ScatterOp : public framework::OperatorWithKernel {
-    PADDLE_ENFORCE_EQ(ctx->GetInputDim("Updates")[0],
-                      ctx->GetInputDim("Ids")[0],
-                      "Updates and Ids should have same batch-size.");
     framework::DDim data_dim(updates_dims);
     for (int i = 1; i < data_dim.size(); ++i) {
       PADDLE_ENFORCE_EQ(data_dim[i], updates_dims[i]);
     }
     ctx->SetOutputDim("Out", ref_dims);
   }
paddle/fluid/operators/split_lod_tensor_op.cc

@@ -157,7 +157,9 @@ class SplitLoDTensorInferShape : public framework::InferShapeBase {
     auto mask_dim = context->GetInputDim("Mask");
     PADDLE_ENFORCE_EQ(mask_dim.size(), 2);
-    PADDLE_ENFORCE_EQ(mask_dim[1], 1);
+    if (context->IsRuntime()) {
+      PADDLE_ENFORCE_EQ(mask_dim[1], 1);
+    }
     context->SetOutputDim("OutTrue", context->GetInputDim("X"));
     context->SetOutputDim("OutFalse", context->GetInputDim("X"));
paddle/fluid/operators/sum_op.cc

@@ -65,7 +65,21 @@ class SumOp : public framework::OperatorWithKernel {
       if (framework::product(in_dim) == 0) {
         in_dim = x_dim;
       } else {
-        PADDLE_ENFORCE_EQ(in_dim, x_dim, "Input tensors must have same shape");
+        if (ctx->IsRuntime()) {
+          PADDLE_ENFORCE_EQ(in_dim, x_dim,
+                            "Input tensors must have same shape");
+        } else {
+          PADDLE_ENFORCE_EQ(in_dim.size(), x_dim.size(),
+                            "Input tensors must have same shape size");
+          // if in_dim or x_dim has -1, not check equal
+          for (int i = 0; i < x_dim.size(); ++i) {
+            if (x_dim[i] == -1 || in_dim[i] == -1) {
+              continue;
+            }
+            PADDLE_ENFORCE_EQ(in_dim[i], x_dim[i],
+                              "Input tensors must have same shape if not -1");
+          }
+        }
       }
     }
     ctx->SetOutputDim("Out", in_dim);
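sum_op.cc gets a slightly richer compile-time fallback than the other files: when not at runtime, it still compares ranks and then compares dimensions element by element, skipping any position where either side is -1. Below is a standalone sketch of that loop; ShapesCompatible is a hypothetical helper written for illustration, not Paddle code.

#include <cassert>
#include <vector>

using Dims = std::vector<long>;

// Compile-time variant of the sum_op check above: ranks must match, and each
// dimension must match unless one side is still the unknown marker -1.
bool ShapesCompatible(const Dims& in_dim, const Dims& x_dim) {
  if (in_dim.size() != x_dim.size()) return false;
  for (size_t i = 0; i < x_dim.size(); ++i) {
    if (x_dim[i] == -1 || in_dim[i] == -1) continue;  // unknown: don't check
    if (in_dim[i] != x_dim[i]) return false;
  }
  return true;
}

int main() {
  assert(ShapesCompatible({-1, 16}, {32, 16}));   // -1 is treated as a wildcard
  assert(!ShapesCompatible({8, 16}, {32, 16}));   // known dims must still agree
  return 0;
}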
paddle/fluid/operators/teacher_student_sigmoid_loss_op.cc

@@ -34,12 +34,14 @@ class TeacherStudentSigmoidLossOp : public framework::OperatorWithKernel {
     PADDLE_ENFORCE_EQ(x_dims.size(), 2UL, "Input(X)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(label_dims.size(), 2UL, "Input(Label)'s rank should be 2.");
-    PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
-                      "The 1st dimension of Input(X) and Input(Label) should "
-                      "be equal.");
-    PADDLE_ENFORCE_EQ(label_dims[1], 1UL,
-                      "The 2nd dimension of "
-                      "Input(Label) should be 1.");
+    if (ctx->IsRuntime()) {
+      PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
+                        "The 1st dimension of Input(X) and Input(Label) should "
+                        "be equal.");
+      PADDLE_ENFORCE_EQ(label_dims[1], 1UL,
+                        "The 2nd dimension of "
+                        "Input(Label) should be 1.");
+    }
     ctx->SetOutputDim("Y", {x_dims[0], 1});
     ctx->ShareLoD("X", /*->*/ "Y");
   }

@@ -74,17 +76,20 @@ class TeacherStudentSigmoidLossGradientOp
     PADDLE_ENFORCE_EQ(x_dims.size(), 2, "Input(X)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(dy_dims.size(), 2, "Input(Y@Grad)'s rank should be 2.");
     PADDLE_ENFORCE_EQ(label_dims.size(), 2, "Input(Label)'s rank should be 2.");
-    PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
-                      "The 1st dimension of Input(X) and Input(Label) should "
-                      "be equal.");
-    PADDLE_ENFORCE_EQ(x_dims[0], dy_dims[0],
-                      "The 1st dimension of Input(X) and Input(Y@Grad) should "
-                      "be equal.");
-    PADDLE_ENFORCE_EQ(dy_dims[1], 1,
-                      "The 2nd dimension of Input(Y@Grad) should be 1.");
-    PADDLE_ENFORCE_EQ(label_dims[1], 1,
-                      "When Attr(soft_label) == false, the 2nd dimension of "
-                      "Input(Label) should be 1.");
+    if (ctx->IsRuntime()) {
+      PADDLE_ENFORCE_EQ(x_dims[0], label_dims[0],
+                        "The 1st dimension of Input(X) and Input(Label) should "
+                        "be equal.");
+      PADDLE_ENFORCE_EQ(x_dims[0], dy_dims[0],
+                        "The 1st dimension of Input(X) and Input(Y@Grad) should "
+                        "be equal.");
+      PADDLE_ENFORCE_EQ(dy_dims[1], 1,
+                        "The 2nd dimension of Input(Y@Grad) should be 1.");
+      PADDLE_ENFORCE_EQ(label_dims[1], 1,
+                        "When Attr(soft_label) == false, the 2nd dimension of "
+                        "Input(Label) should be 1.");
+    }
     ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
     ctx->ShareLoD("X", framework::GradVarName("X"));
   }