Commit 9d108a21, authored by tensor-tang

Add unit tests for mkldnn_pool and pass them

Parent: 2a98cba2
@@ -49,13 +49,14 @@ bool MKLDNNPoolLayer::init(const LayerMap& layerMap,
   if (type == "max-projection") {
     poolAlgo_ = algorithm::pooling_max;
   } else if (type == "avg-projection") {
-    // TODO(TJ): support choosing exclude or include when paddle support it
-    // paddle only support pooling_avg_exclude_padding yet
-    poolAlgo_ = algorithm::pooling_avg_exclude_padding;
+    // TODO(TJ): support choosing exclusive or inclusive when paddle supports it
+    // We can only be sure that paddle uses exclusive averaging when ph == pw == 0;
+    // otherwise, paddle may use mixed or purely inclusive averaging.
+    poolAlgo_ = (ph_ == 0 && pw_ == 0) ? algorithm::pooling_avg_exclude_padding
+                                       : algorithm::pooling_avg_include_padding;
   } else {
     LOG(FATAL) << "unknow pooling type!";
   }
   return true;
 }
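The change above keeps exclusive averaging (pooling_avg_exclude_padding) only when ph == pw == 0 and switches to inclusive averaging otherwise. To see why the two modes diverge as soon as zero padding is involved, here is a minimal standalone sketch; it is not MKL-DNN or Paddle code, and the toy sizes are assumptions chosen only for illustration.

// Standalone sketch: exclusive vs. inclusive average pooling over a
// zero-padded input. Not MKL-DNN or Paddle code; the 3x3 input, 2x2 window,
// padding 1 and stride 2 are assumptions chosen only for illustration.
#include <algorithm>
#include <cstdio>
#include <vector>

// Average-pool a single-channel ih x iw map with symmetric zero padding.
// includePadding == true  -> divisor is always fh * fw (inclusive).
// includePadding == false -> divisor counts only in-bounds elements (exclusive).
std::vector<float> avgPool(const std::vector<float>& in, int ih, int iw,
                           int fh, int fw, int ph, int pw, int sh, int sw,
                           bool includePadding) {
  int oh = (ih + 2 * ph - fh) / sh + 1;
  int ow = (iw + 2 * pw - fw) / sw + 1;
  std::vector<float> out(oh * ow, 0.f);
  for (int y = 0; y < oh; ++y) {
    for (int x = 0; x < ow; ++x) {
      float sum = 0.f;
      int count = 0;
      for (int ky = 0; ky < fh; ++ky) {
        for (int kx = 0; kx < fw; ++kx) {
          int iy = y * sh - ph + ky;
          int ix = x * sw - pw + kx;
          if (iy >= 0 && iy < ih && ix >= 0 && ix < iw) {
            sum += in[iy * iw + ix];
            ++count;
          }
        }
      }
      int divisor = includePadding ? fh * fw : std::max(count, 1);
      out[y * ow + x] = sum / divisor;
    }
  }
  return out;
}

int main() {
  std::vector<float> in = {1, 2, 3, 4, 5, 6, 7, 8, 9};  // 3x3 input
  auto excl = avgPool(in, 3, 3, 2, 2, 1, 1, 2, 2, /*includePadding=*/false);
  auto incl = avgPool(in, 3, 3, 2, 2, 1, 1, 2, 2, /*includePadding=*/true);
  // The corner windows cover one real element and three padded zeros, so
  // exclusive pooling divides by 1 while inclusive pooling divides by 4.
  for (size_t i = 0; i < excl.size(); ++i) {
    std::printf("out[%zu]: exclude=%.2f include=%.2f\n", i, excl[i], incl[i]);
  }
  return 0;
}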
@@ -177,6 +178,12 @@ void MKLDNNPoolLayer::resetFwdPD(std::shared_ptr<pool_fwd::primitive_desc>& pd,
                       padR,
                       padKind);
   pd.reset(new pool_fwd::primitive_desc(fwdDesc, engine_));
+  if ((ph_ != 0 || pw_ != 0) && (padR[0] > padL[0] || padR[1] > padL[1])) {
+    LOG(WARNING) << "In layer " << getName() << ", mkldnn_pool uses inclusive "
+                 << "average pooling, while paddle mixes inclusive and "
+                 << "exclusive pooling, so the two may produce different "
+                 << "results for this layer.";
+  }
   // prepare workspace if necessary
   workspace_ =
...
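The warning fires only when the user asked for padding (ph_ or pw_ non-zero) and the right/bottom padding handed to MKL-DNN ended up larger than the left/top padding. The sketch below is an assumption-based illustration, not the layer's actual reshape code: with a ceil-mode output size, the last pooling window can overhang the padded input, which forces extra implicit padding on the bottom/right.

// Illustrative only: how padR can exceed padL when the output size is
// rounded up (ceil mode). The shape below (ih = 6, fh = 3, ph = 1, sh = 2)
// is an assumption, not one of the commit's test cases.
#include <algorithm>
#include <cstdio>

int main() {
  int ih = 6, fh = 3, ph = 1, sh = 2;
  // Ceil-mode output size (what Paddle's outputSize(..., caffeMode=false)
  // presumably computes).
  int oh = (ih - fh + 2 * ph + sh - 1) / sh + 1;  // = 4
  int padL = ph;
  // The last window starts at (oh - 1) * sh - ph in input coordinates and
  // ends fh rows later; whatever sticks out past row ih - 1 must be covered
  // by extra bottom padding.
  int padR = std::max((oh - 1) * sh + fh - ih - ph, ph);  // = 2 > padL
  std::printf("oh=%d padL=%d padR=%d\n", oh, padL, padR);
  return 0;
}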
@@ -141,6 +141,72 @@ TEST(MKLDNNLayer, ConvLayer) {
   testConvLayer({4, 4, 16, 3, 3, 16, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1});
 }
+struct testPoolDesc {
+  int bs, ch;  // input channel and output channel are the same
+  int ih, iw;
+  int oh, ow;
+  int fh, fw;
+  int ph, pw;
+  int sh, sw;
+};
+
+void testPoolLayer(const testPoolDesc& pm) {
+  const std::string compareTypes[] = {"mkldnn_pool", "pool"};
+  TestConfig cfg;
+  cfg.layerConfig.set_type(compareTypes[0]);
+  cfg.layerConfig.set_size(pm.ch * pm.oh * pm.ow);
+  cfg.inputDefs.push_back(
+      {INPUT_DATA,
+       "layer_0",
+       /* size of input layer= */ size_t(pm.ch * pm.ih * pm.iw),
+       0});
+  LayerInputConfig* input = cfg.layerConfig.add_inputs();
+  PoolConfig* pool = input->mutable_pool_conf();
+  // pool->set_pool_type(poolType);
+  pool->set_channels(pm.ch);
+  pool->set_img_size(pm.iw);
+  pool->set_img_size_y(pm.ih);
+  pool->set_output_x(pm.ow);
+  pool->set_output_y(pm.oh);
+  pool->set_size_x(pm.fw);
+  pool->set_size_y(pm.fh);
+  pool->set_padding(pm.pw);
+  pool->set_padding_y(pm.ph);
+  pool->set_stride(pm.sw);
+  pool->set_stride_y(pm.sh);
+  int oh = outputSize(pm.ih, pm.fh, pm.ph, pm.sh, false);
+  int ow = outputSize(pm.iw, pm.fw, pm.pw, pm.sw, false);
+  CHECK_EQ(ow, pm.ow) << "output size check failed";
+  CHECK_EQ(oh, pm.oh) << "output size check failed";
+
+  MKLDNNTester tester;
+  for (auto type : {"max-projection", "avg-projection"}) {
+    pool->set_pool_type(type);
+    TestConfig ref = cfg;
+    ref.layerConfig.set_type(compareTypes[1]);
+    for (auto bs : {pm.bs, 1}) {
+      tester.run(cfg, ref, bs, pm.ih, pm.iw);
+    }
+  }
+}
+
+TEST(MKLDNNLayer, PoolLayer) {
+  // For max pooling, MKLDNN gives the same result as Paddle.
+  // For avg pooling, MKLDNN uses either inclusive or exclusive pooling,
+  // while Paddle mixes the two, so test cases with padding > 0 may
+  // produce different results; the MKLDNN layer logs a warning for them.
+  /* bs, ch, ih, iw, oh, ow, fh, fw, ph, pw, sh, sw */
+  testPoolLayer({2, 1, 4, 4, 2, 2, 3, 3, 0, 0, 2, 2});
+  testPoolLayer({10, 8, 16, 16, 8, 8, 2, 2, 0, 0, 2, 2});
+  testPoolLayer({4, 2, 5, 5, 3, 3, 3, 3, 1, 1, 2, 2});
+  testPoolLayer({8, 16, 56, 56, 28, 28, 3, 3, 0, 0, 2, 2});
+  testPoolLayer({8, 16, 14, 14, 7, 7, 3, 3, 0, 0, 2, 2});
+  testPoolLayer({4, 16, 7, 7, 1, 1, 7, 7, 0, 0, 1, 1});
+  testPoolLayer({4, 2, 5, 5, 3, 3, 5, 5, 1, 1, 1, 1});
+}
 // TODO(TJ): add branch test
 int main(int argc, char** argv) {
...
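The testPoolLayer helper above derives the expected oh/ow via outputSize(..., /*caffeMode=*/false) and checks them against the table with CHECK_EQ. Below is a rough sketch of that helper, under the assumption that caffeMode=false means ceil-style rounding and caffeMode=true means floor-style rounding (the real Paddle helper may differ in details), checked against the first table entry.

// Sketch of the assumed outputSize behaviour; not copied from Paddle.
#include <cassert>

int outputSizeSketch(int imageSize, int filterSize, int padding, int stride,
                     bool caffeMode) {
  if (caffeMode) {
    // Floor-style rounding.
    return (imageSize - filterSize + 2 * padding) / stride + 1;
  }
  // Ceil-style rounding, matching the tests' caffeMode=false calls.
  return (imageSize - filterSize + 2 * padding + stride - 1) / stride + 1;
}

int main() {
  // First table entry: ih = iw = 4, fh = fw = 3, ph = pw = 0, sh = sw = 2,
  // expected oh = ow = 2.
  assert(outputSizeSketch(4, 3, 0, 2, /*caffeMode=*/false) == 2);
  // With caffeMode=true the same shape would give (4 - 3 + 0) / 2 + 1 = 1,
  // so the rounding mode matters for this configuration.
  return 0;
}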