diff --git a/paddle/phi/ops/compat/activation_sig.cc b/paddle/phi/ops/compat/activation_sig.cc
index e754c79ed1b29b09c06768cd0a2684e0be02c52f..483290e609fbf6d0c64c794ee1c6a15b3562681e 100644
--- a/paddle/phi/ops/compat/activation_sig.cc
+++ b/paddle/phi/ops/compat/activation_sig.cc
@@ -42,28 +42,33 @@ namespace phi {
 DEFINE_ACT_GRAD_DEPX_OP_ARGMAP(HardTanh, "hardtanh", "t_min" comma "t_max");
 DEFINE_ACT_GRAD_DEPX_OP_ARGMAP(Mish, "mish", "threshold");
 
-KernelSignature SwishGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature SwishGradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("swish_grad", {"X", "Out@GRAD"}, {}, {"X@GRAD"});
 }
 
-KernelSignature Relu6GradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Relu6GradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("relu6_grad", {"Out", "Out@GRAD"}, {}, {"X@GRAD"});
 }
 
 KernelSignature HardSwishGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("hardswish_grad", {"X", "Out@GRAD"}, {}, {"X@GRAD"});
 }
 
-KernelSignature HardSwishOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature HardSwishOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("hardswish", {"X"}, {}, {"Out"});
 }
 
-KernelSignature SwishOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature SwishOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("swish_raw", {"X"}, {"beta"}, {"Out"});
 }
 
-KernelSignature Relu6OpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Relu6OpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("relu6_raw", {"X"}, {"threshold"}, {"Out"});
 }
diff --git a/paddle/phi/ops/compat/batch_norm_sig.cc b/paddle/phi/ops/compat/batch_norm_sig.cc
index 5f6efcd9ce769ff9332866d4fcb7a672743c50fd..5e7563b7515c08bd4566207afac22d2c9fc5ebf5 100644
--- a/paddle/phi/ops/compat/batch_norm_sig.cc
+++ b/paddle/phi/ops/compat/batch_norm_sig.cc
@@ -55,7 +55,7 @@ KernelSignature BatchNormOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature BatchNormGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("batch_norm_grad",
                          {
                              "X",
@@ -78,7 +78,7 @@ KernelSignature BatchNormGradOpArgumentMapping(
 }
 
 KernelSignature BatchNormGradGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("batch_norm_double_grad",
                          {"X",
                           "Scale",
diff --git a/paddle/phi/ops/compat/bincount_sig.cc b/paddle/phi/ops/compat/bincount_sig.cc
index 35067c256ed495b0e2156bef87c943334e0ef61f..403b891da977fa562266622148ffabd6f8fa1390 100644
--- a/paddle/phi/ops/compat/bincount_sig.cc
+++ b/paddle/phi/ops/compat/bincount_sig.cc
@@ -16,7 +16,8 @@ namespace phi {
 
-KernelSignature BincountOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature BincountOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("bincount", {"X", "Weights"}, {"minlength"}, {"Out"});
 }
diff --git a/paddle/phi/ops/compat/cast_sig.cc b/paddle/phi/ops/compat/cast_sig.cc
index 326126d7d8fa85a017e9103354ac0b82d09f7a14..b4be2f5a55083ace4067ec15e59cb4635fe71f7b 100644
--- a/paddle/phi/ops/compat/cast_sig.cc
+++ b/paddle/phi/ops/compat/cast_sig.cc
@@ -16,7 +16,8 @@ limitations under the License. */
 
 namespace phi {
 
-KernelSignature CastOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature CastOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("cast", {"X"}, {"out_dtype"}, {"Out"});
 }
diff --git a/paddle/phi/ops/compat/channel_shuffle_sig.cc b/paddle/phi/ops/compat/channel_shuffle_sig.cc
index ae0aa0a80b6f0b9f7137fd8e8c948712a8be7e1f..d3bf58bdec3c8e9abab80503bd94659c59fa3dbd 100644
--- a/paddle/phi/ops/compat/channel_shuffle_sig.cc
+++ b/paddle/phi/ops/compat/channel_shuffle_sig.cc
@@ -17,7 +17,7 @@ namespace phi {
 
 KernelSignature ChannelShuffleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("channel_shuffle_grad",
                          {"Out@GRAD"},
                          {"groups", "data_format"},
diff --git a/paddle/phi/ops/compat/conv2d_sig.cc b/paddle/phi/ops/compat/conv2d_sig.cc
index 4e63ee19580ed4aebeeef24cecbffdfcf02008b5..04b0e14ecbc139f60f0e3ad329cfb94c6295f3f3 100644
--- a/paddle/phi/ops/compat/conv2d_sig.cc
+++ b/paddle/phi/ops/compat/conv2d_sig.cc
@@ -16,7 +16,8 @@ namespace phi {
 
-KernelSignature Conv2dOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Conv2dOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv2d",
                          {"Input", "Filter"},
                          {"strides",
@@ -28,7 +29,8 @@ KernelSignature Conv2dOpArgumentMapping(const ArgumentMappingContext& ctx) {
                          {"Output"});
 }
 
-KernelSignature Conv2dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Conv2dGradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv2d_grad",
                          {"Input", "Filter", "Output@GRAD"},
                          {"strides",
@@ -41,7 +43,7 @@ KernelSignature Conv2dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature Conv2dDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv2d_double_grad",
                          {"Input", "Filter", "DOutput", "DDInput", "DDFilter"},
                          {"strides",
@@ -53,7 +55,8 @@ KernelSignature Conv2dDoubleGradOpArgumentMapping(
                          {"DInput", "DFilter", "DDOutput"});
 }
 
-KernelSignature Conv2dFusionArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Conv2dFusionArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv2d_fusion_cutlass",
                          {"Input", "Filter", "Bias", "ResidualData"},
                          {"strides",
diff --git a/paddle/phi/ops/compat/conv3d_sig.cc b/paddle/phi/ops/compat/conv3d_sig.cc
index f08c2eb3ec228bdc8017ec96395b85523457cff8..68797e0823d2f1a05b9196e2093cf1c3fa45a009 100644
--- a/paddle/phi/ops/compat/conv3d_sig.cc
+++ b/paddle/phi/ops/compat/conv3d_sig.cc
@@ -16,7 +16,8 @@ namespace phi {
 
-KernelSignature Conv3dOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Conv3dOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv3d",
                          {"Input", "Filter"},
                          {
@@ -30,7 +31,8 @@ KernelSignature Conv3dOpArgumentMapping(const ArgumentMappingContext& ctx) {
                          {"Output"});
 }
 
-KernelSignature Conv3dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Conv3dGradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv3d_grad",
                          {"Input", "Filter", "Output@GRAD"},
                          {"strides",
@@ -43,7 +45,7 @@ KernelSignature Conv3dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature Conv3dDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv3d_double_grad",
                          {"Input", "Filter", "DOutput", "DDInput", "DDFilter"},
                          {"strides",
diff --git a/paddle/phi/ops/compat/conv_fusion_sig.cc b/paddle/phi/ops/compat/conv_fusion_sig.cc
index 4cadfe87f53640bb15ac30b660d3c2c274990e10..93d81029abdc257a9238114c74555162ed704025 100644
--- a/paddle/phi/ops/compat/conv_fusion_sig.cc
+++ b/paddle/phi/ops/compat/conv_fusion_sig.cc
@@ -16,7 +16,8 @@ namespace phi {
 
-KernelSignature ConvFusionOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature ConvFusionOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv2d_fusion",
                          {"Input", "Filter", "Bias", "ResidualData"},
                          {
diff --git a/paddle/phi/ops/compat/conv_transpose_sig.cc b/paddle/phi/ops/compat/conv_transpose_sig.cc
index 20714acaae5e8706a11984e0e9c8d7c19811c864..52f04688be898a470db09a6cbb3fd2829818a3c6 100644
--- a/paddle/phi/ops/compat/conv_transpose_sig.cc
+++ b/paddle/phi/ops/compat/conv_transpose_sig.cc
@@ -17,7 +17,7 @@ namespace phi {
 
 KernelSignature Conv2dTransposeOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv2d_transpose",
                          {"Input", "Filter"},
                          {"strides",
@@ -32,7 +32,7 @@ KernelSignature Conv2dTransposeOpArgumentMapping(
 }
 
 KernelSignature Conv2dTransposeGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv2d_transpose_grad",
                          {"Input", "Filter", "Output@GRAD"},
                          {"strides",
@@ -47,7 +47,7 @@ KernelSignature Conv2dTransposeGradOpArgumentMapping(
 }
 
 KernelSignature Conv2dTransposeDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv2d_transpose_double_grad",
                          {"Input", "Filter", "DOutput", "DDInput", "DDFilter"},
                          {"strides",
@@ -62,7 +62,7 @@ KernelSignature Conv2dTransposeDoubleGradOpArgumentMapping(
 }
 
 KernelSignature Conv3dTransposeOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv3d_transpose",
                          {"Input", "Filter"},
                          {"strides",
@@ -77,7 +77,7 @@ KernelSignature Conv3dTransposeOpArgumentMapping(
 }
 
 KernelSignature Conv3dTransposeGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("conv3d_transpose_grad",
                          {"Input", "Filter", "Output@GRAD"},
                          {"strides",
@@ -92,7 +92,7 @@ KernelSignature Conv3dTransposeGradOpArgumentMapping(
 }
 
 KernelSignature DepthwiseConv2dTransposeOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("depthwise_conv2d_transpose",
                          {"Input", "Filter"},
                          {"strides",
@@ -107,7 +107,7 @@ KernelSignature DepthwiseConv2dTransposeOpArgumentMapping(
 }
 
 KernelSignature DepthwiseConv2dTransposeGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("depthwise_conv2d_transpose_grad",
                          {"Input", "Filter", "Output@GRAD"},
                          {"strides",
diff --git a/paddle/phi/ops/compat/cumsum_sig.cc b/paddle/phi/ops/compat/cumsum_sig.cc
index 00992b15435d2153ccd38d95689ce9e1ee9f31bc..c8fbcdec9026b6deb4445206fc8b14ff36490e9c 100644
--- a/paddle/phi/ops/compat/cumsum_sig.cc
+++ b/paddle/phi/ops/compat/cumsum_sig.cc
@@ -15,7 +15,8 @@ namespace phi {
 
-KernelSignature CumsumOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature CumsumOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("cumsum_grad",
                          {"X", "Out@GRAD"},
                          {"axis", "flatten", "exclusive", "reverse"},
diff --git a/paddle/phi/ops/compat/deformable_conv_sig.cc b/paddle/phi/ops/compat/deformable_conv_sig.cc
index f7c5f4d6a34fe49e1521a2f102e057992e5aef04..63e726679b6a12935bf5620c627e506e2c42bbdf 100644
--- a/paddle/phi/ops/compat/deformable_conv_sig.cc
+++ b/paddle/phi/ops/compat/deformable_conv_sig.cc
@@ -17,7 +17,7 @@ namespace phi {
 
 KernelSignature DeformableConvOpV1ArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("deformable_conv",
                          {"Input", "Offset", "Filter", "Mask"},
                          {"strides",
@@ -30,7 +30,7 @@ KernelSignature DeformableConvOpV1ArgumentMapping(
 }
 
 KernelSignature DeformableConvGradOpV1ArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "deformable_conv_grad",
       {"Input", "Offset", "Filter", "Mask", "Output@GRAD"},
diff --git a/paddle/phi/ops/compat/depthwise_conv2d_sig.cc b/paddle/phi/ops/compat/depthwise_conv2d_sig.cc
index 08ff91c2cae5f11c9a841338c93e73fabc34be23..175f9432158bc4215557e8dda5a1af70f21a314c 100644
--- a/paddle/phi/ops/compat/depthwise_conv2d_sig.cc
+++ b/paddle/phi/ops/compat/depthwise_conv2d_sig.cc
@@ -17,7 +17,7 @@ namespace phi {
 
 KernelSignature DepthwiseConv2dOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("depthwise_conv2d",
                          {"Input", "Filter"},
                          {"strides",
@@ -30,7 +30,7 @@ KernelSignature DepthwiseConv2dOpArgumentMapping(
 }
 
 KernelSignature DepthwiseConv2dGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("depthwise_conv2d_grad",
                          {"Input", "Filter", "Output@GRAD"},
                          {"strides",
@@ -43,7 +43,7 @@ KernelSignature DepthwiseConv2dGradOpArgumentMapping(
 }
 
 KernelSignature DepthwiseConv2dDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("depthwise_conv2d_double_grad",
                          {"Input", "Filter", "DOutput", "DDInput", "DDFilter"},
                          {"strides",
diff --git a/paddle/phi/ops/compat/distribute_fpn_proposals_sig.cc b/paddle/phi/ops/compat/distribute_fpn_proposals_sig.cc
index ad02fb1aa09c4b7cb07b44267a765c5572bc87ad..51fc482b64486d74a9fcd05745b7056af89d5cc5 100644
--- a/paddle/phi/ops/compat/distribute_fpn_proposals_sig.cc
+++ b/paddle/phi/ops/compat/distribute_fpn_proposals_sig.cc
@@ -17,7 +17,7 @@ limitations under the License. */
 
 namespace phi {
 
 KernelSignature DistributeFpnProposalsOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "distribute_fpn_proposals",
       {"FpnRois", "RoisNum"},
diff --git a/paddle/phi/ops/compat/dropout_sig.cc b/paddle/phi/ops/compat/dropout_sig.cc
index 403e752ca0e83b872513d3d3478c4c371c46c6a2..2a5cf5482383bf9b66e86883d1570a9b3d3a7735 100644
--- a/paddle/phi/ops/compat/dropout_sig.cc
+++ b/paddle/phi/ops/compat/dropout_sig.cc
@@ -16,7 +16,8 @@ namespace phi {
 
-KernelSignature DropoutOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature DropoutOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "dropout",
       {"X", "Seed"},
@@ -25,14 +26,15 @@ KernelSignature DropoutOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature DropoutGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("dropout_grad",
                          {"Mask", "Out@GRAD"},
                          {"dropout_prob", "is_test", "dropout_implementation"},
                          {"X@GRAD"});
 }
 
-KernelSignature DropoutNdOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature DropoutNdOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("dropout_nd",
                          {"X", "Seed"},
                          {"dropout_prob",
@@ -45,7 +47,7 @@ KernelSignature DropoutNdOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature DropoutNdGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "dropout_nd_grad",
       {"Mask", "Out@GRAD"},
diff --git a/paddle/phi/ops/compat/einsum_sig.cc b/paddle/phi/ops/compat/einsum_sig.cc
index 4fd31c1a2d84211ed39ae372a34e54e6971d616a..3876a9b7c5766e2758df4363e7cadcc5557c728e 100644
--- a/paddle/phi/ops/compat/einsum_sig.cc
+++ b/paddle/phi/ops/compat/einsum_sig.cc
@@ -16,12 +16,14 @@ limitations under the License. */
 
 namespace phi {
 
-KernelSignature EinsumOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature EinsumOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "einsum", {"Operands"}, {"equation"}, {"Out", "InnerCache", "XShape"});
 }
 
-KernelSignature EinsumGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature EinsumGradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("einsum_grad",
                          {"Operands", "InnerCache", "Out@GRAD"},
                          {"equation"},
diff --git a/paddle/phi/ops/compat/elementwise_sig.cc b/paddle/phi/ops/compat/elementwise_sig.cc
index 1906b0a220bb7963325ead0311d2ca723942b71d..b303c7e7a36690201e98e23a650c74f1186961e4 100644
--- a/paddle/phi/ops/compat/elementwise_sig.cc
+++ b/paddle/phi/ops/compat/elementwise_sig.cc
@@ -26,7 +26,7 @@ KernelSignature ElementwiseAddOpArgumentMapping(
 }
 
 KernelSignature ElementwiseGradAddOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("grad_add", {"X", "Y"}, {}, {"Out"});
 }
 
@@ -101,12 +101,12 @@ KernelSignature ElementwiseFloorDivOpArgumentMapping(
 }
 
 KernelSignature ElementwiseHeavisideOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("heaviside", {"X", "Y"}, {}, {"Out"});
 }
 
 KernelSignature ElementwisePowOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   int axis = paddle::any_cast<int>(ctx.Attr("axis"));
   if (axis == -1) {
     return KernelSignature("elementwise_pow", {"X", "Y"}, {}, {"Out"});
@@ -115,19 +115,19 @@ KernelSignature ElementwisePowOpArgumentMapping(
 }
 
 KernelSignature ElementwiseAddGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "add_grad", {"X", "Y", "Out@GRAD"}, {"axis"}, {"X@GRAD", "Y@GRAD"});
 }
 
 KernelSignature ElementwiseAddDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "add_double_grad", {"Y", "DOut", "DDX", "DDY"}, {"axis"}, {"DDOut"});
 }
 
 KernelSignature ElementwiseAddTripleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("add_triple_grad",
                          {"DDX", "DDY", "D_DDOut"},
                          {"axis"},
@@ -135,19 +135,19 @@ KernelSignature ElementwiseAddTripleGradOpArgumentMapping(
 }
 
 KernelSignature ElementwiseSubGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "subtract_grad", {"X", "Y", "Out@GRAD"}, {"axis"}, {"X@GRAD", "Y@GRAD"});
 }
 
 KernelSignature ElementwiseSubDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "subtract_double_grad", {"Y", "DOut", "DDX", "DDY"}, {"axis"}, {"DDOut"});
 }
 
 KernelSignature ElementwiseDivGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("divide_grad",
                          {"X", "Y", "Out", "Out@GRAD"},
                          {"axis"},
@@ -155,13 +155,13 @@ KernelSignature ElementwiseDivGradOpArgumentMapping(
 }
 
 KernelSignature ElementwiseFMinGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "fmin_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
 }
 
 KernelSignature ElementwiseDivDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("divide_double_grad",
                          {"Y", "Out", "DX", "DDX", "DDY"},
                          {"axis"},
@@ -169,29 +169,29 @@ KernelSignature ElementwiseDivDoubleGradOpArgumentMapping(
 }
 
 KernelSignature ElementwiseMulGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "multiply_grad", {"X", "Y", "Out@GRAD"}, {"axis"}, {"X@GRAD", "Y@GRAD"});
 }
 
 KernelSignature ElementwiseFMaxOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("fmax", {"X", "Y"}, {}, {"Out"});
 }
 
 KernelSignature ElementwiseFMinOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("fmin", {"X", "Y"}, {}, {"Out"});
 }
 
 KernelSignature ElementwiseFMaxGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "fmax_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
 }
 
 KernelSignature ElementwiseMulDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("multiply_double_grad",
                          {"X", "Y", "DOut", "DDX", "DDY"},
                          {"axis"},
@@ -199,7 +199,7 @@ KernelSignature ElementwiseMulDoubleGradOpArgumentMapping(
 }
 
 KernelSignature ElementwiseMulTripleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "multiply_triple_grad",
      {"X", "Y", "DOut", "DDX", "DDY", "D_DX", "D_DY", "D_DDOut"},
@@ -208,25 +208,25 @@ KernelSignature ElementwiseMulTripleGradOpArgumentMapping(
 }
 
 KernelSignature ElementwiseMaxGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "maximum_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
 }
 
 KernelSignature ElementwiseMinGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "minimum_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
 }
 
 KernelSignature ElementwiseHeavisideGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "heaviside_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
 }
 
 KernelSignature ElementwisePowGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "elementwise_pow_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
 }
diff --git a/paddle/phi/ops/compat/exponential_sig.cc b/paddle/phi/ops/compat/exponential_sig.cc
index 2d70a4200ab3ca1c70079911e841899c9ac21fff..79bad59184233e2a19165a3a7363bd6322ba20af 100644
--- a/paddle/phi/ops/compat/exponential_sig.cc
+++ b/paddle/phi/ops/compat/exponential_sig.cc
@@ -17,7 +17,7 @@ limitations under the License. */
 
 namespace phi {
 
 KernelSignature ExponentialOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("exponential", {"X"}, {"lambda"}, {"Out"});
 }
diff --git a/paddle/phi/ops/compat/fill_any_like_sig.cc b/paddle/phi/ops/compat/fill_any_like_sig.cc
index 84af155d402d6ba8034f5e65a9b9b6e0d74ffff4..e49bb2a4de3ce0a6804ef449d7755deee411e46a 100644
--- a/paddle/phi/ops/compat/fill_any_like_sig.cc
+++ b/paddle/phi/ops/compat/fill_any_like_sig.cc
@@ -17,7 +17,7 @@ limitations under the License. */
 
 namespace phi {
 
 KernelSignature FillAnyLikeOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("full_like", {"X"}, {"value", "dtype"}, {"Out"});
 }
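
Note: as a rough, self-contained sketch of the pattern this patch applies (not Paddle's actual macro definition or types; UNUSED, DemoContext, DemoSignature, and DemoArgumentMapping below are illustrative stand-ins), an UNUSED-style annotation typically expands to the compiler's unused attribute so that a never-read parameter kept for a uniform mapping-function signature does not trip -Wunused-parameter under -Wall -Wextra -Werror:

// Minimal sketch of annotating a deliberately unused parameter.
#include <iostream>
#include <string>

#if defined(__GNUC__) || defined(__clang__)
#define UNUSED __attribute__((unused))  // silences -Wunused-parameter
#else
#define UNUSED
#endif

struct DemoContext {};  // hypothetical stand-in for ArgumentMappingContext

struct DemoSignature {  // hypothetical stand-in for KernelSignature
  std::string kernel_name;
};

// The context parameter is kept so every mapping function shares the same
// signature, but this particular mapping never inspects it.
DemoSignature DemoArgumentMapping(const DemoContext& ctx UNUSED) {
  return DemoSignature{"demo_kernel"};
}

int main() {
  DemoContext ctx;
  std::cout << DemoArgumentMapping(ctx).kernel_name << "\n";
  return 0;
}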