Crayon鑫 / Paddle (forked from PaddlePaddle / Paddle)
Commit aecfeb72
Authored on Oct 27, 2017 by tensor-tang
refine check macro
Parent: 8f4476b8
Showing 4 changed files with 43 additions and 39 deletions.
paddle/gserver/layers/MKLDNNBatchNormLayer.cpp  +8  -17
paddle/gserver/layers/MKLDNNConvLayer.cpp       +26 -16
paddle/gserver/layers/MKLDNNLayer.cpp           +3  -6
paddle/math/MKLDNNMatrix.h                      +6  -0
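In short, the commit replaces the repeated two-step pattern of "CHECK the matrix pointer, then CHECK that its primitive descriptor matches" with a single CHECK_PRIMITIVE_DESC_EQ macro, defined in paddle/math/MKLDNNMatrix.h at the end of this diff. A minimal before/after sketch of the pattern, reusing the out and pd names from the MKLDNNBatchNormLayer hunk below:

// Before: two separate glog-style checks per MKLDNNMatrixPtr.
CHECK(out);
CHECK(out->getPrimitiveDesc() == pd->dst_primitive_desc());

// After: one macro call; an optional trailing argument is appended to the
// failure message (see the MKLDNNConvLayer hunks for examples with a message).
CHECK_PRIMITIVE_DESC_EQ(out, pd->dst_primitive_desc());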
paddle/gserver/layers/MKLDNNBatchNormLayer.cpp
@@ -216,17 +216,13 @@ void MKLDNNBatchNormLayer::resetFwdPD(
   }
   auto fwdDesc = bn_fwd::desc(pk, in->getMemoryDesc(), EPS, flags_);
   pd.reset(new bn_fwd::primitive_desc(fwdDesc, engine_));
-  // TODO(TJ): use check macro
-  CHECK(out);
-  CHECK(out->getPrimitiveDesc() == pd->dst_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(out, pd->dst_primitive_desc());
   if (wgt) {
-    CHECK(wgt->getPrimitiveDesc() == pd->weights_primitive_desc());
+    CHECK_PRIMITIVE_DESC_EQ(wgt, pd->weights_primitive_desc());
   }
   if (passType_ != PASS_TEST || useGlobalStats_) {
-    CHECK(mean_);
-    CHECK(mean_->getPrimitiveDesc() == pd->mean_primitive_desc());
-    CHECK(var_);
-    CHECK(var_->getPrimitiveDesc() == pd->variance_primitive_desc());
+    CHECK_PRIMITIVE_DESC_EQ(mean_, pd->mean_primitive_desc());
+    CHECK_PRIMITIVE_DESC_EQ(var_, pd->variance_primitive_desc());
   }
 }
@@ -283,19 +279,14 @@ void MKLDNNBatchNormLayer::resetBwdPD(
   if (in == nullptr) {
     return;
   }
-  CHECK(out);
-  CHECK(out->getPrimitiveDesc() == in->getPrimitiveDesc());
+  CHECK_PRIMITIVE_DESC_EQ(out, in->getPrimitiveDesc());
   auto md = in->getMemoryDesc();
   auto bwdDesc = bn_bwd::desc(prop_kind::backward, md, md, EPS, flags_);
   pd.reset(new bn_bwd::primitive_desc(bwdDesc, engine_, *fwdPD_));
-  // TODO(TJ): use check macro
-  CHECK(wgt);
-  CHECK(wgt->getPrimitiveDesc() == pd->diff_weights_primitive_desc());
   CHECK(pd->weights_primitive_desc() == fwdPD_->weights_primitive_desc());
-  CHECK(mean_);
-  CHECK(mean_->getPrimitiveDesc() == pd->mean_primitive_desc());
-  CHECK(var_);
-  CHECK(var_->getPrimitiveDesc() == pd->variance_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(wgt, pd->diff_weights_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(mean_, pd->mean_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(var_, pd->variance_primitive_desc());
 }

 void MKLDNNBatchNormLayer::resetBwdPipeline(
paddle/gserver/layers/MKLDNNConvLayer.cpp
@@ -262,12 +262,15 @@ void MKLDNNConvLayer::resetBwdWgtPD(
                             padR,
                             padKind);
   pd.reset(new conv_bwdWgt::primitive_desc(bwdWgtDesc, engine_, *fwdPD_));
-  CHECK(pd->src_primitive_desc() == inVal_->getPrimitiveDesc())
-      << "primitive desc of in value should equal";
-  CHECK(pd->diff_dst_primitive_desc() == outVal_->getPrimitiveDesc())
-      << "primitive desc of out grad should equal the out value";
-  CHECK(pd->diff_weights_primitive_desc() == wgtVal_->getPrimitiveDesc())
-      << "primitive desc of weight grad should equal the weight value";
+  CHECK_PRIMITIVE_DESC_EQ(inVal_, pd->src_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(
+      outVal_,
+      pd->diff_dst_primitive_desc(),
+      "primitive desc of out value and grad should be equal");
+  CHECK_PRIMITIVE_DESC_EQ(
+      wgtVal_,
+      pd->diff_weights_primitive_desc(),
+      "primitive desc of weight value and grad should be equal");
 }

 void MKLDNNConvLayer::resetBwdDataPD(
@@ -292,10 +295,14 @@ void MKLDNNConvLayer::resetBwdDataPD(
                             padR,
                             padding_kind::zero);
   pd.reset(new conv_bwdData::primitive_desc(bwdDataDesc, engine_, *fwdPD_));
-  CHECK(pd->diff_src_primitive_desc() == inVal_->getPrimitiveDesc())
-      << "primitive desc of in grad should equal the in value";
-  CHECK(pd->diff_dst_primitive_desc() == outVal_->getPrimitiveDesc())
-      << "primitive desc of out grad should equal";
+  CHECK_PRIMITIVE_DESC_EQ(
+      inVal_,
+      pd->diff_src_primitive_desc(),
+      "primitive desc of in value and grad should be equal");
+  CHECK_PRIMITIVE_DESC_EQ(
+      outVal_,
+      pd->diff_dst_primitive_desc(),
+      "primitive desc of out value and grad should be equal");
 }

 void MKLDNNConvLayer::resetBwdBuffers(
@@ -310,17 +317,20 @@ void MKLDNNConvLayer::resetBwdBuffers(
   resetWithMatrix(wgt, weight_->getWGrad(), wgtPD->diff_weights_primitive_desc());
-  CHECK(wgtVal_ != nullptr &&
-        wgt->getPrimitiveDesc() == wgtVal_->getPrimitiveDesc())
-      << "primitive desc of weight grad and value should be equal";
+  CHECK_PRIMITIVE_DESC_EQ(
+      wgtVal_,
+      wgt->getPrimitiveDesc(),
+      "primitive desc of weight grad and value should be equal");
   bias = nullptr;
   if (biases_ && biases_->getWGrad()) {
     resetWithMatrix(bias, biases_->getWGrad(), wgtPD->diff_bias_primitive_desc());
-    CHECK(bias && biasVal_ &&
-          bias->getPrimitiveDesc() == biasVal_->getPrimitiveDesc())
-        << "primitive desc of bias grad should equal the bias value";
+    CHECK(bias);
+    CHECK_PRIMITIVE_DESC_EQ(
+        biasVal_,
+        bias->getPrimitiveDesc(),
+        "primitive desc of bias grad and value should be equal");
   }
   if (dataPD == nullptr) {
paddle/gserver/layers/MKLDNNLayer.cpp
@@ -235,8 +235,7 @@ void MKLDNNLayer::resetInGrad(MKLDNNMatrixPtr& in,
   in = MKLDNNMatrix::create(intPD, inMat);
   Argument& arg = input->getOutput(this->getName());
   arg.grad = std::dynamic_pointer_cast<Matrix>(in);
-  CHECK(inVal_);
-  CHECK(inVal_->getPrimitiveDesc() == intPD) << "the primitive desc must equal";
+  CHECK_PRIMITIVE_DESC_EQ(inVal_, intPD);
   if (inputIsOnlyMKLDNN()) {
     return;
   }
@@ -250,8 +249,7 @@ void MKLDNNLayer::resetInGrad(MKLDNNMatrixPtr& in,
   CHECK(extInVal_ != nullptr && isPaddleFormat(extInVal_->getFormat()))
       << "should have external input value and the format must be nchw(nc)";
   extInGrad_ = MKLDNNMatrix::create(extInVal_->getPrimitiveDesc(), inMat);
-  CHECK(inVal_ != nullptr && inVal_->getPrimitiveDesc() == intPD)
-      << "should have internal input value and primitive desc must equal";
+  CHECK_PRIMITIVE_DESC_EQ(inVal_, intPD);
   in = MKLDNNMatrix::create(intPD);
   cvtInGrad_ = MKLDNNMatrix::createReorder(in, extInGrad_);
   CHECK(cvtInGrad_);
@@ -277,8 +275,7 @@ void MKLDNNLayer::resetOutGrad(MKLDNNMatrixPtr& out,
   CHECK(extOutVal_ != nullptr && isPaddleFormat(extOutVal_->getFormat()))
       << "should have external output value and the format must be nchw(nc)";
   extOutGrad_ = MKLDNNMatrix::create(extOutVal_->getPrimitiveDesc(), outMat);
-  CHECK(outVal_ != nullptr && outVal_->getPrimitiveDesc() == intPD)
-      << "should have internal output value and primitive desc must equal";
+  CHECK_PRIMITIVE_DESC_EQ(outVal_, intPD);
   out = MKLDNNMatrix::create(intPD);
   cvtOutGrad_ = MKLDNNMatrix::createReorder(extOutGrad_, out);
   CHECK(cvtOutGrad_);
paddle/math/MKLDNNMatrix.h
@@ -24,6 +24,12 @@ namespace paddle {
 class MKLDNNMatrix;
 typedef std::shared_ptr<MKLDNNMatrix> MKLDNNMatrixPtr;

+#define CHECK_PRIMITIVE_DESC_EQ(MAT, PD, ...)                                  \
+  CHECK(MAT) << " can not be empty.";                                          \
+  CHECK(MAT->getPrimitiveDesc() == PD)                                         \
+      << #MAT "->getPrimitiveDesc() and " #PD " should be equal.\n "           \
+      << "" __VA_ARGS__;
+
 /**
  * @brief MKLDNN Matrix.
  *
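For reference, a self-contained sketch of how the variadic tail of this macro behaves: CHECK_PRIMITIVE_DESC_EQ(mat, pd) fails with only the default message, while CHECK_PRIMITIVE_DESC_EQ(mat, pd, "extra text") appends the caller-supplied text through << "" __VA_ARGS__. The toy types below are hypothetical stand-ins rather than Paddle's real MKLDNNMatrix and mkldnn primitive_desc; only the macro body mirrors the one added above, and it assumes glog's CHECK, which Paddle already uses.

#include <glog/logging.h>
#include <memory>

// Hypothetical stand-ins so the macro can be exercised outside Paddle.
struct FakePrimitiveDesc {
  int id;
  bool operator==(const FakePrimitiveDesc& other) const { return id == other.id; }
};

struct FakeMatrix {
  FakePrimitiveDesc pd;
  FakePrimitiveDesc getPrimitiveDesc() const { return pd; }
};
typedef std::shared_ptr<FakeMatrix> FakeMatrixPtr;

// Same shape as the macro added in paddle/math/MKLDNNMatrix.h.
#define CHECK_PRIMITIVE_DESC_EQ(MAT, PD, ...)                                  \
  CHECK(MAT) << " can not be empty.";                                          \
  CHECK(MAT->getPrimitiveDesc() == PD)                                         \
      << #MAT "->getPrimitiveDesc() and " #PD " should be equal.\n "           \
      << "" __VA_ARGS__;

int main() {
  auto out = std::make_shared<FakeMatrix>();
  out->pd = FakePrimitiveDesc{1};
  FakePrimitiveDesc expected{1};

  // Passes: non-null pointer and matching descriptors; default message only.
  CHECK_PRIMITIVE_DESC_EQ(out, expected);

  // The optional third argument is appended to the failure message if the
  // check ever fires; it has no effect on the passing path.
  CHECK_PRIMITIVE_DESC_EQ(out, expected, "out value and grad should match");
  return 0;
}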