From 7ad39c4077b9bc50ab61079be4e7117140a9b18b Mon Sep 17 00:00:00 2001
From: chengduo
Date: Tue, 28 Aug 2018 13:16:32 +0800
Subject: [PATCH] Enhance pad_constant_like_op (#12999)

* enhance pad_constant_like_op

* add API

* add API
---
 paddle/fluid/API.spec                       |  1 +
 .../fluid/operators/pad_constant_like_op.cc | 16 ++++
 python/paddle/fluid/layers/nn.py            | 83 ++++++++++++++++++-
 3 files changed, 99 insertions(+), 1 deletion(-)

diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec
index a9ca2606215..7ae0f445a8d 100644
--- a/paddle/fluid/API.spec
+++ b/paddle/fluid/API.spec
@@ -147,6 +147,7 @@ paddle.fluid.layers.reshape ArgSpec(args=['x', 'shape', 'actual_shape', 'act', '
 paddle.fluid.layers.lod_reset ArgSpec(args=['x', 'y', 'target_lod'], varargs=None, keywords=None, defaults=(None, None))
 paddle.fluid.layers.lrn ArgSpec(args=['input', 'n', 'k', 'alpha', 'beta', 'name'], varargs=None, keywords=None, defaults=(5, 1.0, 0.0001, 0.75, None))
 paddle.fluid.layers.pad ArgSpec(args=['x', 'paddings', 'pad_value', 'name'], varargs=None, keywords=None, defaults=(0.0, None))
+paddle.fluid.layers.pad_constant_like ArgSpec(args=['x', 'y', 'pad_value', 'name'], varargs=None, keywords=None, defaults=(0.0, None))
 paddle.fluid.layers.label_smooth ArgSpec(args=['label', 'prior_dist', 'epsilon', 'dtype', 'name'], varargs=None, keywords=None, defaults=(None, 0.1, 'float32', None))
 paddle.fluid.layers.roi_pool ArgSpec(args=['input', 'rois', 'pooled_height', 'pooled_width', 'spatial_scale'], varargs=None, keywords=None, defaults=(1, 1, 1.0))
 paddle.fluid.layers.dice_loss ArgSpec(args=['input', 'label', 'epsilon'], varargs=None, keywords=None, defaults=(1e-05,))
diff --git a/paddle/fluid/operators/pad_constant_like_op.cc b/paddle/fluid/operators/pad_constant_like_op.cc
index 5958811d38f..37646c7b4c5 100644
--- a/paddle/fluid/operators/pad_constant_like_op.cc
+++ b/paddle/fluid/operators/pad_constant_like_op.cc
@@ -43,6 +43,14 @@ class PadConstantLikeOp : public framework::OperatorWithKernel {
     ctx->SetOutputDim("Out", x_dim);
     ctx->ShareLoD("X", /*->*/ "Out");
   }
+
+ protected:
+  framework::OpKernelType GetExpectedKernelType(
+      const framework::ExecutionContext &ctx) const override {
+    return framework::OpKernelType(
+        framework::ToDataType(ctx.Input<framework::Tensor>("Y")->type()),
+        ctx.device_context());
+  }
 };
 
 class PadConstantLikeOpMaker : public framework::OpProtoAndCheckerMaker {
@@ -159,6 +167,14 @@ class PadConstantLikeOpGrad : public framework::OperatorWithKernel {
       }
     }
   }
+
+ protected:
+  framework::OpKernelType GetExpectedKernelType(
+      const framework::ExecutionContext &ctx) const override {
+    return framework::OpKernelType(
+        framework::ToDataType(ctx.Input<framework::Tensor>("Y")->type()),
+        ctx.device_context());
+  }
 };
 
 class PadConstantLikeOpGradMaker : public framework::SingleGradOpDescMaker {
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index f98b18afa7c..3e3f8841378 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -88,6 +88,7 @@ __all__ = [
     'lod_reset',
     'lrn',
     'pad',
+    'pad_constant_like',
     'label_smooth',
     'roi_pool',
     'dice_loss',
@@ -4755,6 +4756,86 @@ def pad(x, paddings, pad_value=0., name=None):
     return out
 
 
+def pad_constant_like(x, y, pad_value=0., name=None):
+    """
+    Pad :attr:`y` with :attr:`pad_value` so that its shape matches :attr:`x`.
+    The number of values padded onto the end of each axis is the difference
+    between the two shapes, i.e. the pad widths are
+    ((0, x.shape[0] - y.shape[0]), ..., (0, x.shape[n] - y.shape[n])).
+    Both inputs should be k-D tensors (0 < k < 7).
+
+    See below for an example.
+
+    .. code-block:: text
+
+        Given:
+            X = [[[[ 0, 1, 2],
+                   [ 3, 4, 5]],
+                  [[ 6, 7, 8],
+                   [ 9, 10, 11]],
+                  [[12, 13, 14],
+                   [15, 16, 17]]],
+                 [[[18, 19, 20],
+                   [21, 22, 23]],
+                  [[24, 25, 26],
+                   [27, 28, 29]],
+                  [[30, 31, 32],
+                   [33, 34, 35]]]]
+            X.shape = (2, 3, 2, 3)
+
+            Y = [[[[35, 36, 37]],
+                  [[38, 39, 40]],
+                  [[41, 42, 43]]]]
+            Y.shape = (1, 3, 1, 3)
+
+        And
+            pad_value = -1,
+
+        Return:
+            Out = [[[[35, 36, 37],
+                     [-1, -1, -1]],
+                    [[38, 39, 40],
+                     [-1, -1, -1]],
+                    [[41, 42, 43],
+                     [-1, -1, -1]]],
+                   [[[-1, -1, -1],
+                     [-1, -1, -1]],
+                    [[-1, -1, -1],
+                     [-1, -1, -1]],
+                    [[-1, -1, -1],
+                     [-1, -1, -1]]]]
+            Out.shape = (2, 3, 2, 3)
+
+    Args:
+        x (Variable): The input tensor variable whose shape the output matches.
+        y (Variable): The input tensor variable to be padded.
+        pad_value (float): The constant value used to pad.
+        name (str|None): A name for this layer (optional). If set None, the
+            layer will be named automatically.
+
+    Returns:
+        Variable: The padded tensor variable, with the same shape as x.
+
+    Examples:
+        .. code-block:: python
+
+            # x is a rank 4 tensor variable, x.shape = (2, 3, 2, 3)
+            # y is a rank 4 tensor variable, y.shape = (1, 3, 1, 3)
+            out = fluid.layers.pad_constant_like(x=x, y=y, pad_value=0.)
+            # out is a rank 4 tensor variable, and out.shape = (2, 3, 2, 3)
+    """
+    helper = LayerHelper('pad_constant_like', input=x, **locals())
+    dtype = helper.input_dtype()
+    out = helper.create_tmp_variable(dtype)
+    helper.append_op(
+        type='pad_constant_like',
+        inputs={'X': x,
+                'Y': y},
+        outputs={'Out': out},
+        attrs={'pad_value': float(pad_value)})
+    return out
+
+
 def label_smooth(label,
                  prior_dist=None,
                  epsilon=0.1,
@@ -5351,7 +5432,7 @@ def crop(x, shape=None, offsets=None, name=None):
     helper = LayerHelper('crop', **locals())
 
     if not (isinstance(shape, list) or isinstance(shape, tuple) or \
-            isinstance(shape, Variable)):
+            isinstance(shape, Variable)):
         raise ValueError("The shape should be a list, tuple or Variable.")
 
     if offsets is None:
-- 
GitLab
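A minimal end-to-end sketch of how the new layer could be exercised, assuming the 2018-era Fluid API (fluid.layers.data with append_batch_size=False, fluid.Executor, and fluid.default_main_program); the shapes, variable names, and CPU place below are illustrative and not part of the patch:

    # Sketch (not part of the patch): exercising fluid.layers.pad_constant_like.
    import numpy as np
    import paddle.fluid as fluid

    # Declare fixed-shape inputs; append_batch_size=False keeps the shapes exact.
    x = fluid.layers.data(
        name='x', shape=[2, 3, 2, 3], dtype='float32', append_batch_size=False)
    y = fluid.layers.data(
        name='y', shape=[1, 3, 1, 3], dtype='float32', append_batch_size=False)
    # Pad y up to x's shape, filling the new elements with -1.
    out = fluid.layers.pad_constant_like(x=x, y=y, pad_value=-1.0)

    exe = fluid.Executor(fluid.CPUPlace())
    x_np = np.arange(36, dtype='float32').reshape(2, 3, 2, 3)
    y_np = np.arange(35, 44, dtype='float32').reshape(1, 3, 1, 3)
    out_np, = exe.run(fluid.default_main_program(),
                      feed={'x': x_np, 'y': y_np},
                      fetch_list=[out])
    print(out_np.shape)  # (2, 3, 2, 3) -- same shape as x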