diff --git a/paddle/fluid/operators/pool_op.cc b/paddle/fluid/operators/pool_op.cc
index bde087e080d6ac4012b9d37c989a70002556badd..67093e6c6daf546a58b05293a0877c9b2eda6ab0 100644
--- a/paddle/fluid/operators/pool_op.cc
+++ b/paddle/fluid/operators/pool_op.cc
@@ -93,7 +93,7 @@ void PoolOp::InferShape(framework::InferShapeContext* ctx) const {
   } else {
     for (size_t i = 0; i < data_dims.size(); ++i) {
       if ((!ctx->IsRuntime()) && (data_dims[i] < 0)) {
-        output_shape.push_back(in_x_dims[i]);
+        output_shape.push_back(data_dims[i]);
       } else {
         output_shape.push_back(
             PoolOutputSize(data_dims[i], ksize[i], paddings[2 * i],
@@ -118,8 +118,6 @@ void PoolOp::InferShape(framework::InferShapeContext* ctx) const {
 framework::OpKernelType PoolOp::GetExpectedKernelType(
     const framework::ExecutionContext& ctx) const {
   framework::LibraryType library_{framework::LibraryType::kPlain};
-  // std::string data_format = ctx.Attr<std::string>("data_format"); // change:
-  // delete
   std::string data_format = "AnyLayout";
   framework::DataLayout layout_ = framework::StringToDataLayout(data_format);
 
@@ -150,8 +148,6 @@ void PoolOpGrad::InferShape(framework::InferShapeContext* ctx) const {
 framework::OpKernelType PoolOpGrad::GetExpectedKernelType(
     const framework::ExecutionContext& ctx) const {
   framework::LibraryType library_{framework::LibraryType::kPlain};
-  // std::string data_format = ctx.Attr<std::string>("data_format"); //
-  // change:delete
   std::string data_format = "AnyLayout";
   framework::DataLayout layout_ = framework::StringToDataLayout(data_format);
 
diff --git a/python/paddle/fluid/tests/unittests/test_pool2d_op.py b/python/paddle/fluid/tests/unittests/test_pool2d_op.py
index b2f09b6a1350b8235e07eb394dddd2d676be53f9..d5cc142b2a856cce93c8f8ef8f1bdf47f3a8e9d7 100644
--- a/python/paddle/fluid/tests/unittests/test_pool2d_op.py
+++ b/python/paddle/fluid/tests/unittests/test_pool2d_op.py
@@ -968,6 +968,18 @@ class TestPool2dAPI(OpTest):
             append_batch_size=False,
             dtype="float32")
 
+        input_NHWC_negetive = fluid.layers.data(
+            name="input_NHWC_negetive",
+            shape=[2, -1, 5, 3],
+            append_batch_size=False,
+            dtype="float32")
+
+        input_NCHW_negetive = fluid.layers.data(
+            name="input_NCHW_negetive",
+            shape=[2, 3, -1, -1],
+            append_batch_size=False,
+            dtype="float32")
+
         ksize = [3, 3]
         out_1 = fluid.layers.pool2d(
             input=input_NHWC,
@@ -1034,11 +1046,34 @@ class TestPool2dAPI(OpTest):
             use_cudnn=False,
             data_format="NHWC")
 
+        # test negetive
+        out_9 = fluid.layers.pool2d(
+            input=input_NHWC_negetive,
+            pool_size=ksize,
+            pool_type="avg",
+            pool_padding=[0, 0],
+            use_cudnn=False,
+            data_format="NHWC")
+        assert out_9.shape == (2, -1, 3, 3)
+
+        out_10 = fluid.layers.pool2d(
+            input=input_NCHW_negetive,
+            pool_size=ksize,
+            pool_type="avg",
+            pool_padding=[0, 0],
+            use_cudnn=False,
+            data_format="NCHW")
+        assert out_10.shape == (2, 3, -1, -1)
+
         exe = fluid.Executor(place=fluid.CPUPlace())
         [res_1, res_2, res_3, res_4, res_5, res_6, res_7, res_8] = exe.run(
             fluid.default_main_program(),
-            feed={"input_NHWC": x_NHWC,
-                  "input_NCHW": x_NCHW},
+            feed={
+                "input_NHWC": x_NHWC,
+                "input_NCHW": x_NCHW,
+                "input_NHWC_negetive": x_NHWC,
+                "input_NCHW_negetive": x_NCHW
+            },
             fetch_list=[
                 out_1, out_2, out_3, out_4, out_5, out_6, out_7, out_8
             ])