From 9d108a21a7c25d01c5c7665d76e83577585aefd8 Mon Sep 17 00:00:00 2001
From: tensor-tang
Date: Sat, 16 Sep 2017 21:42:25 +0800
Subject: [PATCH] add unit tests for mkldnn_pool and pass them

---
 paddle/gserver/layers/MKLDNNPoolLayer.cpp | 15 ++++--
 paddle/gserver/tests/test_MKLDNN.cpp      | 66 +++++++++++++++++++++++
 2 files changed, 77 insertions(+), 4 deletions(-)

diff --git a/paddle/gserver/layers/MKLDNNPoolLayer.cpp b/paddle/gserver/layers/MKLDNNPoolLayer.cpp
index 7ef7ee494d7..26cc50bb53e 100644
--- a/paddle/gserver/layers/MKLDNNPoolLayer.cpp
+++ b/paddle/gserver/layers/MKLDNNPoolLayer.cpp
@@ -49,13 +49,14 @@ bool MKLDNNPoolLayer::init(const LayerMap& layerMap,
 
   if (type == "max-projection") {
     poolAlgo_ = algorithm::pooling_max;
   } else if (type == "avg-projection") {
-    // TODO(TJ): support choosing exclude or include when paddle support it
-    // paddle only support pooling_avg_exclude_padding yet
-    poolAlgo_ = algorithm::pooling_avg_exclude_padding;
+    // TODO(TJ): support choosing exclusive or inclusive once paddle supports it.
+    // We can only be sure that paddle uses exclusive pooling when ph==pw==0;
+    // otherwise paddle may use mixed or inclusive-only pooling.
+    poolAlgo_ = (ph_ == 0 && pw_ == 0) ? algorithm::pooling_avg_exclude_padding
+                                       : algorithm::pooling_avg_include_padding;
   } else {
     LOG(FATAL) << "unknow pooling type!";
   }
-
   return true;
 }
 
@@ -177,6 +178,12 @@ void MKLDNNPoolLayer::resetFwdPD(std::shared_ptr<pool_fwd::primitive_desc>& pd,
                          padR,
                          padKind);
   pd.reset(new pool_fwd::primitive_desc(fwdDesc, engine_));
+  if ((ph_ != 0 || pw_ != 0) && (padR[0] > padL[0] || padR[1] > padL[1])) {
+    LOG(WARNING)
+        << "In layer " << getName() << ", mkldnn_pool uses inclusive "
+        << "pooling, while paddle mixes inclusive and exclusive pooling, "
+        << "so the results of this layer may differ.";
+  }
 
   // prepare workspace if necessary
   workspace_ =
diff --git a/paddle/gserver/tests/test_MKLDNN.cpp b/paddle/gserver/tests/test_MKLDNN.cpp
index e70802881e3..c3e2545e083 100644
--- a/paddle/gserver/tests/test_MKLDNN.cpp
+++ b/paddle/gserver/tests/test_MKLDNN.cpp
@@ -141,6 +141,72 @@ TEST(MKLDNNLayer, ConvLayer) {
   testConvLayer({4, 4, 16, 3, 3, 16, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1});
 }
 
+struct testPoolDesc {
+  int bs, ch;  // input channel and output channel are the same
+  int ih, iw;
+  int oh, ow;
+  int fh, fw;
+  int ph, pw;
+  int sh, sw;
+};
+
+void testPoolLayer(const testPoolDesc& pm) {
+  const std::string compareTypes[] = {"mkldnn_pool", "pool"};
+  TestConfig cfg;
+  cfg.layerConfig.set_type(compareTypes[0]);
+  cfg.layerConfig.set_size(pm.ch * pm.oh * pm.ow);
+  cfg.inputDefs.push_back(
+      {INPUT_DATA,
+       "layer_0",
+       /* size of input layer= */ size_t(pm.ch * pm.ih * pm.iw),
+       0});
+  LayerInputConfig* input = cfg.layerConfig.add_inputs();
+  PoolConfig* pool = input->mutable_pool_conf();
+  // pool->set_pool_type(poolType);
+  pool->set_channels(pm.ch);
+  pool->set_img_size(pm.iw);
+  pool->set_img_size_y(pm.ih);
+  pool->set_output_x(pm.ow);
+  pool->set_output_y(pm.oh);
+  pool->set_size_x(pm.fw);
+  pool->set_size_y(pm.fh);
+  pool->set_padding(pm.pw);
+  pool->set_padding_y(pm.ph);
+  pool->set_stride(pm.sw);
+  pool->set_stride_y(pm.sh);
+
+  int oh = outputSize(pm.ih, pm.fh, pm.ph, pm.sh, false);
+  int ow = outputSize(pm.iw, pm.fw, pm.pw, pm.sw, false);
+  CHECK_EQ(ow, pm.ow) << "output size check failed";
+  CHECK_EQ(oh, pm.oh) << "output size check failed";
+
+  MKLDNNTester tester;
+  for (auto type : {"max-projection", "avg-projection"}) {
+    pool->set_pool_type(type);
+    TestConfig ref = cfg;
+    ref.layerConfig.set_type(compareTypes[1]);
+    for (auto bs : {pm.bs, 1}) {
+      tester.run(cfg, ref, bs, pm.ih, pm.iw);
+    }
+  }
+}
+
+TEST(MKLDNNLayer, PoolLayer) {
+  // For max pooling, MKLDNN gives the same results as Paddle.
+  // For avg pooling, MKLDNN uses either inclusive or exclusive pooling,
+  // while Paddle mixes the two types, so test cases with padding > 0
+  // may produce different results between the two implementations.
+  // The MKLDNN layer logs a warning for those cases.
+  /* bs, ch, ih, iw, oh, ow, fh, fw, ph, pw, sh, sw */
+  testPoolLayer({2, 1, 4, 4, 2, 2, 3, 3, 0, 0, 2, 2});
+  testPoolLayer({10, 8, 16, 16, 8, 8, 2, 2, 0, 0, 2, 2});
+  testPoolLayer({4, 2, 5, 5, 3, 3, 3, 3, 1, 1, 2, 2});
+  testPoolLayer({8, 16, 56, 56, 28, 28, 3, 3, 0, 0, 2, 2});
+  testPoolLayer({8, 16, 14, 14, 7, 7, 3, 3, 0, 0, 2, 2});
+  testPoolLayer({4, 16, 7, 7, 1, 1, 7, 7, 0, 0, 1, 1});
+  testPoolLayer({4, 2, 5, 5, 3, 3, 5, 5, 1, 1, 1, 1});
+}
+
 // TODO(TJ): add branch test
 
 int main(int argc, char** argv) {
-- 
GitLab
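
The behavioral difference this patch works around is easiest to see with concrete numbers. Below is a minimal, self-contained sketch, independent of the Paddle and MKL-DNN APIs (avgPoolWindow is a hypothetical helper written only for this illustration), showing how exclusive and inclusive average pooling diverge once a window overlaps the zero padding:

#include <algorithm>
#include <iostream>
#include <vector>

// Average-pool one fh x fw window anchored at output position (oy, ox),
// over an ih x iw input with zero padding (ph, pw) and stride (sh, sw).
// Exclusive mode divides by the number of real (non-padding) elements
// covered; inclusive mode divides by the full window size fh * fw.
float avgPoolWindow(const std::vector<float>& in, int ih, int iw,
                    int fh, int fw, int ph, int pw, int sh, int sw,
                    int oy, int ox, bool exclusive) {
  float sum = 0.f;
  int count = 0;  // real input elements covered by this window
  for (int ky = 0; ky < fh; ++ky) {
    for (int kx = 0; kx < fw; ++kx) {
      int y = oy * sh - ph + ky;
      int x = ox * sw - pw + kx;
      if (y >= 0 && y < ih && x >= 0 && x < iw) {
        sum += in[y * iw + x];
        ++count;
      }
    }
  }
  return sum / (exclusive ? std::max(count, 1) : fh * fw);
}

int main() {
  // 2x2 input, 3x3 window, padding 1, stride 1: the top-left window
  // covers all four real values plus five padding zeros.
  std::vector<float> in = {1, 2, 3, 4};
  float ex = avgPoolWindow(in, 2, 2, 3, 3, 1, 1, 1, 1, 0, 0, true);
  float inc = avgPoolWindow(in, 2, 2, 3, 3, 1, 1, 1, 1, 0, 0, false);
  std::cout << "exclusive: " << ex << "\n";   // (1+2+3+4)/4 = 2.5
  std::cout << "inclusive: " << inc << "\n";  // (1+2+3+4)/9 ~= 1.111
  return 0;
}

Since every MKL-DNN pooling primitive commits to exactly one of these conventions, while (per the patch comments) Paddle's reference implementation mixes them, the two backends can only be guaranteed to agree when ph==pw==0, which is why the layer logs a warning otherwise.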
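The CHECK_EQ guards in testPoolLayer only confirm that each testPoolDesc is internally consistent. Assuming Paddle's outputSize(imageSize, filterSize, padding, stride, caffeMode) helper rounds up when caffeMode is false (an assumption inferred here, though all seven test cases above satisfy it), the shape arithmetic can be spot-checked like this:

#include <cassert>

// Ceil-mode output size, matching what outputSize(img, filter, pad,
// stride, /*caffeMode=*/false) is assumed to compute: a window may
// partially hang over the padded edge, so the division rounds up.
int outputSizeCeil(int imageSize, int filterSize, int padding, int stride) {
  return (imageSize - filterSize + 2 * padding + stride - 1) / stride + 1;
}

int main() {
  // Spot-check two of the testPoolLayer cases above.
  // {8, 16, 56, 56, 28, 28, 3, 3, 0, 0, 2, 2}: ih=56, fh=3, ph=0, sh=2
  assert(outputSizeCeil(56, 3, 0, 2) == 28);
  // {4, 2, 5, 5, 3, 3, 3, 3, 1, 1, 2, 2}: ih=5, fh=3, ph=1, sh=2
  assert(outputSizeCeil(5, 3, 1, 2) == 3);
  return 0;
}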