Crayon鑫 / Paddle (forked from PaddlePaddle / Paddle)
Unverified commit 5fe3b638
Authored on Apr 14, 2020 by huzhiqiang; committed via GitHub on Apr 14, 2020
[error message enhancement] fused_elemwise_activation_op and fusion_conv_inception_op (#23686)
Parent: c4e6e206

Showing 2 changed files with 79 additions and 34 deletions (+79, -34)
paddle/fluid/operators/fused/fused_elemwise_activation_op.cc (+64, -30)
paddle/fluid/operators/fused/fusion_conv_inception_op.cc (+15, -4)
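Both files apply the same fix pattern: bare PADDLE_ENFORCE / PADDLE_ENFORCE_EQ checks gain an explicit expected value and a typed platform::errors::InvalidArgument message. A minimal before/after sketch of that pattern, reusing the functor_list size check from the first hunk below (it assumes the enforce/errors headers these op files already include):

// Old style: size check with no descriptive failure message.
PADDLE_ENFORCE_EQ(functor_list.size(), 2);

// New style: the expected value is explicit and the failure carries a
// typed, formatted error.
PADDLE_ENFORCE_EQ(
    functor_list.size(), 2,
    platform::errors::InvalidArgument(
        "Invalid functor list size %d, which should be equal to %d.",
        functor_list.size(), 2));

// Boolean preconditions follow the same shape in the hunks below:
// PADDLE_ENFORCE(cond, msg)  ->  PADDLE_ENFORCE_EQ(cond, true,
//                                    platform::errors::InvalidArgument(msg));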
paddle/fluid/operators/fused/fused_elemwise_activation_op.cc
@@ -20,7 +20,11 @@ namespace paddle {
 namespace operators {

 bool IsUnaryCompound(const std::vector<std::string> &functor_list) {
-  PADDLE_ENFORCE_EQ(functor_list.size(), 2);
+  PADDLE_ENFORCE_EQ(
+      functor_list.size(), 2,
+      platform::errors::InvalidArgument(
+          "Invalid functor list size %d, which should be equal to %d.",
+          functor_list.size(), 2));
   static std::unordered_set<std::string> binary_fun = {
       "elementwise_add", "elementwise_mul", "elementwise_add_grad",
       "elementwise_mul_grad"};
@@ -28,7 +32,11 @@ bool IsUnaryCompound(const std::vector<std::string> &functor_list) {
 }

 bool HasInPlaceUnary(const std::vector<std::string> &functor_list) {
-  PADDLE_ENFORCE_EQ(functor_list.size(), 2);
+  PADDLE_ENFORCE_EQ(
+      functor_list.size(), 2,
+      platform::errors::InvalidArgument(
+          "Invalid functor list size %d, which should be equal to %d.",
+          functor_list.size(), 2));
   static std::unordered_set<std::string> InplaceOpSet = {"relu", "relu_grad"};
   bool is_in_place = false;
   for (auto &func_name : functor_list) {
@@ -38,7 +46,11 @@ bool HasInPlaceUnary(const std::vector<std::string> &functor_list) {
 }

 bool InputXCanBeAbsent(const std::vector<std::string> &functor_list) {
-  PADDLE_ENFORCE_EQ(functor_list.size(), 2);
+  PADDLE_ENFORCE_EQ(
+      functor_list.size(), 2,
+      platform::errors::InvalidArgument(
+          "Invalid functor list size %d, which should be equal to %d.",
+          functor_list.size(), 2));
   static std::unordered_set<std::string> binary_fun = {"elementwise_add_grad"};
   return binary_fun.count(functor_list[0]) != 0 ||
          binary_fun.count(functor_list[1]) != 0;
@@ -50,7 +62,11 @@ bool InputXCanBeAbsent(const std::vector<std::string> &functor_list) {
  * out.
  */
 static bool IsSupportedCompound(const std::vector<std::string> &functors) {
-  PADDLE_ENFORCE_EQ(functors.size(), 2UL);
+  PADDLE_ENFORCE_EQ(
+      functors.size(), 2UL,
+      platform::errors::InvalidArgument(
+          "Invalid functor list size %d, which should be equal to %d.",
+          functors.size(), 2));
   static std::unordered_set<std::string> unary_fun = {"scale", "relu", "tanh",
                                                       "sigmoid"};
@@ -63,11 +79,12 @@ static bool IsSupportedCompound(const std::vector<std::string> &functors) {
   } else if (binary_fun.count(functors[1])) {
     unary_fun_str = functors[0];
   } else {
-    PADDLE_THROW("%s and %s are not included in fused_list.", functors[0],
-                 functors[1]);
+    PADDLE_THROW(platform::errors::InvalidArgument(
+        "%s and %s are not included in fused_list.", functors[0],
+        functors[1]));
   }
   PADDLE_ENFORCE_EQ(unary_fun.count(unary_fun_str), 1,
-                    "%s is not included in fused_list.", unary_fun_str);
+                    platform::errors::InvalidArgument(
+                        "%s is not included in fused_list.", unary_fun_str));
   return true;
 }
@@ -76,15 +93,18 @@ class FusedElemwiseActivationOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;

   void InferShape(framework::InferShapeContext *ctx) const override {
-    PADDLE_ENFORCE(
-        ctx->HasInput("X"),
-        "Input(X) of FusedElemwiseActivationOp op should not be null.");
-    PADDLE_ENFORCE(
-        ctx->HasInput("Y"),
-        "Input(Y) of FusedElemwiseActivationOp op should not be null.");
-    PADDLE_ENFORCE(
-        ctx->HasOutput("Out"),
-        "Output(Out) of FusedElemwiseActivationOp op should not be null.");
+    PADDLE_ENFORCE_EQ(
+        ctx->HasInput("X"), true,
+        platform::errors::InvalidArgument(
+            "Input(X) of FusedElemwiseActivationOp op should not be null."));
+    PADDLE_ENFORCE_EQ(
+        ctx->HasInput("Y"), true,
+        platform::errors::InvalidArgument(
+            "Input(Y) of FusedElemwiseActivationOp op should not be null."));
+    PADDLE_ENFORCE_EQ(
+        ctx->HasOutput("Out"), true,
+        platform::errors::InvalidArgument(
+            "Output(Out) of FusedElemwiseActivationOp op should not be null."));

     auto x_dim = ctx->GetInputDim("X");
     auto y_dim = ctx->GetInputDim("Y");
@@ -97,9 +117,11 @@ class FusedElemwiseActivationOp : public framework::OperatorWithKernel {
     std::string out_lod = bcast_y ? "X" : "Y";

     if (ctx->Attrs().Get<bool>("save_intermediate_out")) {
-      PADDLE_ENFORCE(ctx->HasOutput("IntermediateOut"),
-                     "Output(IntermediateOut) of FusedElemwiseActivationOp "
-                     "should not be null.");
+      PADDLE_ENFORCE_EQ(
+          ctx->HasOutput("IntermediateOut"), true,
+          platform::errors::InvalidArgument(
+              "Output(IntermediateOut) of FusedElemwiseActivationOp "
+              "should not be null."));

       if (IsUnaryCompound(
               ctx->Attrs().Get<std::vector<std::string>>("functor_list"))) {
@@ -139,7 +161,8 @@ class FusedElemwiseActivationOp : public framework::OperatorWithKernel {
       const framework::ExecutionContext &ctx) const override {
     PADDLE_ENFORCE_EQ(ctx.Input<framework::Tensor>("X")->type(),
                       ctx.Input<framework::Tensor>("Y")->type(),
-                      "The element's type of input should be the same.");
+                      platform::errors::InvalidArgument(
+                          "The element's type of input should be the same."));
     return framework::OpKernelType(
         OperatorWithKernel::IndicateVarDataType(ctx, "X"), ctx.GetPlace());
   }
@@ -173,7 +196,10 @@ class FusedElemwiseActivationMaker : public framework::OpProtoAndCheckerMaker {
     AddAttr<std::vector<std::string>>("functor_list",
                                       "The functors that should be fused.")
         .AddCustomChecker([&](const std::vector<std::string> &functor_list) {
-          PADDLE_ENFORCE(IsSupportedCompound(functor_list));
+          PADDLE_ENFORCE_EQ(
+              IsSupportedCompound(functor_list), true,
+              platform::errors::InvalidArgument(
+                  "the input functors should support compounding."));
         });

     AddComment(R"DOC(
@@ -266,18 +292,22 @@ class FusedElemwiseActivationOpGrad : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;

   void InferShape(framework::InferShapeContext *ctx) const override {
-    PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
-                   "Input(Out@Grad) should not be null");
+    PADDLE_ENFORCE_EQ(
+        ctx->HasInput(framework::GradVarName("Out")), true,
+        platform::errors::InvalidArgument(
+            "Input(Out@Grad) should not be null."));

     auto functor_list =
         ctx->Attrs().Get<std::vector<std::string>>("functor_list");

     if (ctx->Attrs().Get<bool>("save_intermediate_out")) {
-      PADDLE_ENFORCE(ctx->HasInput("IntermediateOut"),
-                     "Input(IntermediateOut) should not be null");
+      PADDLE_ENFORCE_EQ(ctx->HasInput("IntermediateOut"), true,
+                        platform::errors::InvalidArgument(
+                            "Input(IntermediateOut) should not be null."));
     } else {
       if (!InputXCanBeAbsent(functor_list)) {
-        PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should not be null");
+        PADDLE_ENFORCE_EQ(
+            ctx->HasInput("X"), true,
+            platform::errors::InvalidArgument("Input(X) should not be null."));
       }
     }
@@ -292,9 +322,11 @@ class FusedElemwiseActivationOpGrad : public framework::OperatorWithKernel {
     } else {
       // Currently, only when Binary is elementwise_add or elementwise_sub,
       // the "X" could be absent.
-      PADDLE_ENFORCE(InputXCanBeAbsent(functor_list),
-                     "Only when BinaryFunctor is elementwise_add, the 'X' "
-                     "could be absent.");
+      PADDLE_ENFORCE_EQ(
+          InputXCanBeAbsent(functor_list), true,
+          platform::errors::InvalidArgument(
+              "Only when BinaryFunctor is elementwise_add, the 'X' "
+              "could be absent."));

       // Node: If "X" is absence, the shape of Y should be a continuous
       // subsequence of X, otherwise, we could not infer the shape of dx.
@@ -306,7 +338,9 @@ class FusedElemwiseActivationOpGrad : public framework::OperatorWithKernel {
     }

     if (ctx->HasOutput(y_grad_name)) {
-      PADDLE_ENFORCE(ctx->HasInput("Y"), "Input(Y) should not be null");
+      PADDLE_ENFORCE_EQ(ctx->HasInput("Y"), true,
+                        platform::errors::InvalidArgument(
+                            "Input(Y) should not be null."));
       ctx->SetOutputDim(y_grad_name, ctx->GetInputDim("Y"));
       ctx->ShareLoD("Y", y_grad_name);
     }
paddle/fluid/operators/fused/fusion_conv_inception_op.cc
@@ -32,10 +32,21 @@ class ConvInceptionFusionOp : public framework::OperatorWithKernel {
     // 4 filters
     auto w_dims = ctx->GetInputsDim("Filter");

-    PADDLE_ENFORCE(in_dims.size(), 4, "Conv intput should be 4-D tensor.");
-    PADDLE_ENFORCE_EQ(w_dims.size(), 4, "There should be 4 filters");
-    PADDLE_ENFORCE_EQ(w_dims[0][1], in_dims[1]);
-    PADDLE_ENFORCE_EQ(w_dims[1][1], in_dims[1]);
+    PADDLE_ENFORCE_EQ(
+        in_dims.size(), 4,
+        platform::errors::InvalidArgument("Conv intput should be 4-D tensor."));
+    PADDLE_ENFORCE_EQ(w_dims.size(), 4, platform::errors::InvalidArgument(
+                                            "There should be 4 filters."));
+    PADDLE_ENFORCE_EQ(w_dims[0][1], in_dims[1],
+                      platform::errors::InvalidArgument(
+                          "Invalid fileter channel number %d, which should be "
+                          "equal to input channel number %d.",
+                          w_dims[0][1], in_dims[1]));
+    PADDLE_ENFORCE_EQ(w_dims[1][1], in_dims[1],
+                      platform::errors::InvalidArgument(
+                          "Invalid fileter channel number %d, which should be "
+                          "equal to input channel number %d.",
+                          w_dims[1][1], in_dims[1]));

     int n = in_dims[0];
     // compute output channel