Commit c1fd1dc7 authored by Tao Luo, committed by GitHub

Merge pull request #5154 from tensor-tang/merge

refine check macro
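In essence, the commit replaces the hand-written pair of a null check plus a primitive-descriptor comparison with a single CHECK_PRIMITIVE_DESC_EQ macro, defined in MKLDNNMatrix.h at the end of this diff. A simplified sketch of the call-site pattern (schematic, not a literal excerpt):

  // Before: two explicit checks at every call site.
  CHECK(out);
  CHECK(out->getPrimitiveDesc() == pd->dst_primitive_desc());

  // After: one macro performs the null check and the comparison,
  // stringizing its arguments into the failure message.
  CHECK_PRIMITIVE_DESC_EQ(out, pd->dst_primitive_desc());

  // An optional trailing string literal is appended to that message:
  CHECK_PRIMITIVE_DESC_EQ(
      outVal_,
      pd->diff_dst_primitive_desc(),
      "primitive desc of out value and grad should be equal");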
@@ -216,17 +216,13 @@ void MKLDNNBatchNormLayer::resetFwdPD(
   }
   auto fwdDesc = bn_fwd::desc(pk, in->getMemoryDesc(), EPS, flags_);
   pd.reset(new bn_fwd::primitive_desc(fwdDesc, engine_));
-  // TODO(TJ): use check macro
-  CHECK(out);
-  CHECK(out->getPrimitiveDesc() == pd->dst_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(out, pd->dst_primitive_desc());
   if (wgt) {
-    CHECK(wgt->getPrimitiveDesc() == pd->weights_primitive_desc());
+    CHECK_PRIMITIVE_DESC_EQ(wgt, pd->weights_primitive_desc());
   }
   if (passType_ != PASS_TEST || useGlobalStats_) {
-    CHECK(mean_);
-    CHECK(mean_->getPrimitiveDesc() == pd->mean_primitive_desc());
-    CHECK(var_);
-    CHECK(var_->getPrimitiveDesc() == pd->variance_primitive_desc());
+    CHECK_PRIMITIVE_DESC_EQ(mean_, pd->mean_primitive_desc());
+    CHECK_PRIMITIVE_DESC_EQ(var_, pd->variance_primitive_desc());
   }
 }
@@ -283,19 +279,14 @@ void MKLDNNBatchNormLayer::resetBwdPD(
   if (in == nullptr) {
     return;
   }
-  CHECK(out);
-  CHECK(out->getPrimitiveDesc() == in->getPrimitiveDesc());
+  CHECK_PRIMITIVE_DESC_EQ(out, in->getPrimitiveDesc());
   auto md = in->getMemoryDesc();
   auto bwdDesc = bn_bwd::desc(prop_kind::backward, md, md, EPS, flags_);
   pd.reset(new bn_bwd::primitive_desc(bwdDesc, engine_, *fwdPD_));
-  // TODO(TJ): use check macro
-  CHECK(wgt);
-  CHECK(wgt->getPrimitiveDesc() == pd->diff_weights_primitive_desc());
   CHECK(pd->weights_primitive_desc() == fwdPD_->weights_primitive_desc());
-  CHECK(mean_);
-  CHECK(mean_->getPrimitiveDesc() == pd->mean_primitive_desc());
-  CHECK(var_);
-  CHECK(var_->getPrimitiveDesc() == pd->variance_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(wgt, pd->diff_weights_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(mean_, pd->mean_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(var_, pd->variance_primitive_desc());
 }
 
 void MKLDNNBatchNormLayer::resetBwdPipeline(
...
@@ -262,12 +262,15 @@ void MKLDNNConvLayer::resetBwdWgtPD(
                                padR,
                                padKind);
   pd.reset(new conv_bwdWgt::primitive_desc(bwdWgtDesc, engine_, *fwdPD_));
-  CHECK(pd->src_primitive_desc() == inVal_->getPrimitiveDesc())
-      << "primitive desc of in value should equal";
-  CHECK(pd->diff_dst_primitive_desc() == outVal_->getPrimitiveDesc())
-      << "primitive desc of out grad should equal the out value";
-  CHECK(pd->diff_weights_primitive_desc() == wgtVal_->getPrimitiveDesc())
-      << "primitive desc of weight grad should equal the weight value";
+  CHECK_PRIMITIVE_DESC_EQ(inVal_, pd->src_primitive_desc());
+  CHECK_PRIMITIVE_DESC_EQ(
+      outVal_,
+      pd->diff_dst_primitive_desc(),
+      "primitive desc of out value and grad should be equal");
+  CHECK_PRIMITIVE_DESC_EQ(
+      wgtVal_,
+      pd->diff_weights_primitive_desc(),
+      "primitive desc of weight value and grad should be equal");
 }
 
 void MKLDNNConvLayer::resetBwdDataPD(
@@ -292,10 +295,14 @@ void MKLDNNConvLayer::resetBwdDataPD(
                                 padR,
                                 padding_kind::zero);
   pd.reset(new conv_bwdData::primitive_desc(bwdDataDesc, engine_, *fwdPD_));
-  CHECK(pd->diff_src_primitive_desc() == inVal_->getPrimitiveDesc())
-      << "primitive desc of in grad should equal the in value";
-  CHECK(pd->diff_dst_primitive_desc() == outVal_->getPrimitiveDesc())
-      << "primitive desc of out grad should equal";
+  CHECK_PRIMITIVE_DESC_EQ(
+      inVal_,
+      pd->diff_src_primitive_desc(),
+      "primitive desc of in value and grad should be equal");
+  CHECK_PRIMITIVE_DESC_EQ(
+      outVal_,
+      pd->diff_dst_primitive_desc(),
+      "primitive desc of out value and grad should be equal");
 }
 
 void MKLDNNConvLayer::resetBwdBuffers(
@@ -310,17 +317,20 @@ void MKLDNNConvLayer::resetBwdBuffers(
   resetWithMatrix(
       wgt, weight_->getWGrad(), wgtPD->diff_weights_primitive_desc());
-  CHECK(wgtVal_ != nullptr &&
-        wgt->getPrimitiveDesc() == wgtVal_->getPrimitiveDesc())
-      << "primitive desc of weight grad and value should be equal";
+  CHECK_PRIMITIVE_DESC_EQ(
+      wgtVal_,
+      wgt->getPrimitiveDesc(),
+      "primitive desc of weight grad and value should be equal");
 
   bias = nullptr;
   if (biases_ && biases_->getWGrad()) {
     resetWithMatrix(
         bias, biases_->getWGrad(), wgtPD->diff_bias_primitive_desc());
-    CHECK(bias && biasVal_ &&
-          bias->getPrimitiveDesc() == biasVal_->getPrimitiveDesc())
-        << "primitive desc of bias grad should equal the bias value";
+    CHECK(bias);
+    CHECK_PRIMITIVE_DESC_EQ(
+        biasVal_,
+        bias->getPrimitiveDesc(),
+        "primitive desc of bias grad and value should be equal");
   }
 
   if (dataPD == nullptr) {
...
@@ -235,8 +235,7 @@ void MKLDNNLayer::resetInGrad(MKLDNNMatrixPtr& in,
   in = MKLDNNMatrix::create(intPD, inMat);
   Argument& arg = input->getOutput(this->getName());
   arg.grad = std::dynamic_pointer_cast<Matrix>(in);
-  CHECK(inVal_);
-  CHECK(inVal_->getPrimitiveDesc() == intPD) << "the primitive desc must equal";
+  CHECK_PRIMITIVE_DESC_EQ(inVal_, intPD);
   if (inputIsOnlyMKLDNN()) {
     return;
   }
@@ -250,8 +249,7 @@ void MKLDNNLayer::resetInGrad(MKLDNNMatrixPtr& in,
   CHECK(extInVal_ != nullptr && isPaddleFormat(extInVal_->getFormat()))
       << "should have external input value and the format must be nchw(nc)";
   extInGrad_ = MKLDNNMatrix::create(extInVal_->getPrimitiveDesc(), inMat);
-  CHECK(inVal_ != nullptr && inVal_->getPrimitiveDesc() == intPD)
-      << "should have internal input value and primitive desc must equal";
+  CHECK_PRIMITIVE_DESC_EQ(inVal_, intPD);
   in = MKLDNNMatrix::create(intPD);
   cvtInGrad_ = MKLDNNMatrix::createReorder(in, extInGrad_);
   CHECK(cvtInGrad_);
@@ -277,8 +275,7 @@ void MKLDNNLayer::resetOutGrad(MKLDNNMatrixPtr& out,
   CHECK(extOutVal_ != nullptr && isPaddleFormat(extOutVal_->getFormat()))
       << "should have external output value and the format must be nchw(nc)";
   extOutGrad_ = MKLDNNMatrix::create(extOutVal_->getPrimitiveDesc(), outMat);
-  CHECK(outVal_ != nullptr && outVal_->getPrimitiveDesc() == intPD)
-      << "should have internal output value and primitive desc must equal";
+  CHECK_PRIMITIVE_DESC_EQ(outVal_, intPD);
   out = MKLDNNMatrix::create(intPD);
   cvtOutGrad_ = MKLDNNMatrix::createReorder(extOutGrad_, out);
   CHECK(cvtOutGrad_);
...
@@ -24,6 +24,12 @@ namespace paddle {
 class MKLDNNMatrix;
 typedef std::shared_ptr<MKLDNNMatrix> MKLDNNMatrixPtr;
 
+#define CHECK_PRIMITIVE_DESC_EQ(MAT, PD, ...)                        \
+  CHECK(MAT) << " can not be empty.";                                \
+  CHECK(MAT->getPrimitiveDesc() == PD)                               \
+      << #MAT "->getPrimitiveDesc() and " #PD " should be equal.\n " \
+      << "" __VA_ARGS__;
+
 /**
  * @brief MKLDNN Matrix.
  *
...
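For reference, each use of the macro expands to two glog-style statements: a null check on MAT, then the descriptor comparison, with the failure message built by stringizing the macro arguments; the "" __VA_ARGS__ idiom concatenates an optional extra string literal onto that message (and degenerates to an empty string when no message is passed). A rough sketch of the preprocessed form of CHECK_PRIMITIVE_DESC_EQ(inVal_, intPD); (illustrative only, assuming glog's CHECK):

  CHECK(inVal_) << " can not be empty.";
  CHECK(inVal_->getPrimitiveDesc() == intPD)
      << "inVal_->getPrimitiveDesc() and intPD should be equal.\n "
      << "";

Note that the body is two statements with no do { ... } while (0) wrapper, so a call site must sit inside braces; an unbraced if (cond) CHECK_PRIMITIVE_DESC_EQ(...); would guard only the first CHECK. Every call site touched by this diff is inside a braced block.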