Unverified commit 2f42cb7f, authored by Galaxy1458, committed via GitHub

test, test=develop (#53945)

Parent commit: b922e711
......@@ -42,28 +42,33 @@ namespace phi {
// Generate argument mappings for activation grad ops whose gradient depends
// on the forward input X. NOTE(review): "comma" is presumably a helper macro
// expanding to ',' so multiple attributes survive macro argument splitting —
// confirm against the macro's definition.
DEFINE_ACT_GRAD_DEPX_OP_ARGMAP(HardTanh, "hardtanh", "t_min" comma "t_max");
DEFINE_ACT_GRAD_DEPX_OP_ARGMAP(Mish, "mish", "threshold");
// Maps the legacy swish grad op to the phi "swish_grad" kernel signature.
// The mapping is unconditional, so ctx is intentionally UNUSED.
KernelSignature SwishGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("swish_grad", {"X", "Out@GRAD"}, {}, {"X@GRAD"});
}
// Maps the legacy relu6 grad op to the phi "relu6_grad" kernel signature.
// Gradient depends on the forward output Out, not the input X.
KernelSignature Relu6GradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("relu6_grad", {"Out", "Out@GRAD"}, {}, {"X@GRAD"});
}
// Maps the legacy hard_swish grad op to the phi "hardswish_grad" kernel.
KernelSignature HardSwishGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("hardswish_grad", {"X", "Out@GRAD"}, {}, {"X@GRAD"});
}
// Maps the legacy hard_swish op to the phi "hardswish" kernel signature.
KernelSignature HardSwishOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("hardswish", {"X"}, {}, {"Out"});
}
// Maps the legacy swish op (with its "beta" attribute) to "swish_raw".
KernelSignature SwishOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("swish_raw", {"X"}, {"beta"}, {"Out"});
}
// Maps the legacy relu6 op (with its "threshold" attribute) to "relu6_raw".
KernelSignature Relu6OpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("relu6_raw", {"X"}, {"threshold"}, {"Out"});
}
......
......@@ -55,7 +55,7 @@ KernelSignature BatchNormOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature BatchNormGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("batch_norm_grad",
{
"X",
......@@ -78,7 +78,7 @@ KernelSignature BatchNormGradOpArgumentMapping(
}
KernelSignature BatchNormGradGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("batch_norm_double_grad",
{"X",
"Scale",
......
......@@ -16,7 +16,8 @@
namespace phi {
// Maps the legacy bincount op to the phi "bincount" kernel signature.
KernelSignature BincountOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("bincount", {"X", "Weights"}, {"minlength"}, {"Out"});
}
......
......@@ -16,7 +16,8 @@ limitations under the License. */
namespace phi {
// Maps the legacy cast op to the phi "cast" kernel signature.
KernelSignature CastOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("cast", {"X"}, {"out_dtype"}, {"Out"});
}
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature ChannelShuffleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("channel_shuffle_grad",
{"Out@GRAD"},
{"groups", "data_format"},
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature Conv2dOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Conv2dOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv2d",
{"Input", "Filter"},
{"strides",
......@@ -28,7 +29,8 @@ KernelSignature Conv2dOpArgumentMapping(const ArgumentMappingContext& ctx) {
{"Output"});
}
KernelSignature Conv2dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Conv2dGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv2d_grad",
{"Input", "Filter", "Output@GRAD"},
{"strides",
......@@ -41,7 +43,7 @@ KernelSignature Conv2dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature Conv2dDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv2d_double_grad",
{"Input", "Filter", "DOutput", "DDInput", "DDFilter"},
{"strides",
......@@ -53,7 +55,8 @@ KernelSignature Conv2dDoubleGradOpArgumentMapping(
{"DInput", "DFilter", "DDOutput"});
}
KernelSignature Conv2dFusionArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Conv2dFusionArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv2d_fusion_cutlass",
{"Input", "Filter", "Bias", "ResidualData"},
{"strides",
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature Conv3dOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Conv3dOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv3d",
{"Input", "Filter"},
{
......@@ -30,7 +31,8 @@ KernelSignature Conv3dOpArgumentMapping(const ArgumentMappingContext& ctx) {
{"Output"});
}
KernelSignature Conv3dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Conv3dGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv3d_grad",
{"Input", "Filter", "Output@GRAD"},
{"strides",
......@@ -43,7 +45,7 @@ KernelSignature Conv3dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature Conv3dDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv3d_double_grad",
{"Input", "Filter", "DOutput", "DDInput", "DDFilter"},
{"strides",
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature ConvFusionOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature ConvFusionOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv2d_fusion",
{"Input", "Filter", "Bias", "ResidualData"},
{
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature Conv2dTransposeOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv2d_transpose",
{"Input", "Filter"},
{"strides",
......@@ -32,7 +32,7 @@ KernelSignature Conv2dTransposeOpArgumentMapping(
}
KernelSignature Conv2dTransposeGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv2d_transpose_grad",
{"Input", "Filter", "Output@GRAD"},
{"strides",
......@@ -47,7 +47,7 @@ KernelSignature Conv2dTransposeGradOpArgumentMapping(
}
KernelSignature Conv2dTransposeDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv2d_transpose_double_grad",
{"Input", "Filter", "DOutput", "DDInput", "DDFilter"},
{"strides",
......@@ -62,7 +62,7 @@ KernelSignature Conv2dTransposeDoubleGradOpArgumentMapping(
}
KernelSignature Conv3dTransposeOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv3d_transpose",
{"Input", "Filter"},
{"strides",
......@@ -77,7 +77,7 @@ KernelSignature Conv3dTransposeOpArgumentMapping(
}
KernelSignature Conv3dTransposeGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("conv3d_transpose_grad",
{"Input", "Filter", "Output@GRAD"},
{"strides",
......@@ -92,7 +92,7 @@ KernelSignature Conv3dTransposeGradOpArgumentMapping(
}
KernelSignature DepthwiseConv2dTransposeOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("depthwise_conv2d_transpose",
{"Input", "Filter"},
{"strides",
......@@ -107,7 +107,7 @@ KernelSignature DepthwiseConv2dTransposeOpArgumentMapping(
}
KernelSignature DepthwiseConv2dTransposeGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("depthwise_conv2d_transpose_grad",
{"Input", "Filter", "Output@GRAD"},
{"strides",
......
......@@ -15,7 +15,8 @@
namespace phi {
KernelSignature CumsumOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature CumsumOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("cumsum_grad",
{"X", "Out@GRAD"},
{"axis", "flatten", "exclusive", "reverse"},
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature DeformableConvOpV1ArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("deformable_conv",
{"Input", "Offset", "Filter", "Mask"},
{"strides",
......@@ -30,7 +30,7 @@ KernelSignature DeformableConvOpV1ArgumentMapping(
}
KernelSignature DeformableConvGradOpV1ArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"deformable_conv_grad",
{"Input", "Offset", "Filter", "Mask", "Output@GRAD"},
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature DepthwiseConv2dOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("depthwise_conv2d",
{"Input", "Filter"},
{"strides",
......@@ -30,7 +30,7 @@ KernelSignature DepthwiseConv2dOpArgumentMapping(
}
KernelSignature DepthwiseConv2dGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("depthwise_conv2d_grad",
{"Input", "Filter", "Output@GRAD"},
{"strides",
......@@ -43,7 +43,7 @@ KernelSignature DepthwiseConv2dGradOpArgumentMapping(
}
KernelSignature DepthwiseConv2dDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("depthwise_conv2d_double_grad",
{"Input", "Filter", "DOutput", "DDInput", "DDFilter"},
{"strides",
......
......@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi {
KernelSignature DistributeFpnProposalsOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"distribute_fpn_proposals",
{"FpnRois", "RoisNum"},
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature DropoutOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature DropoutOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"dropout",
{"X", "Seed"},
......@@ -25,14 +26,15 @@ KernelSignature DropoutOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
// Maps the legacy dropout grad op to the phi "dropout_grad" kernel signature.
KernelSignature DropoutGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("dropout_grad",
                         {"Mask", "Out@GRAD"},
                         {"dropout_prob", "is_test", "dropout_implementation"},
                         {"X@GRAD"});
}
KernelSignature DropoutNdOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature DropoutNdOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("dropout_nd",
{"X", "Seed"},
{"dropout_prob",
......@@ -45,7 +47,7 @@ KernelSignature DropoutNdOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature DropoutNdGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"dropout_nd_grad",
{"Mask", "Out@GRAD"},
......
......@@ -16,12 +16,14 @@ limitations under the License. */
namespace phi {
// Maps the legacy einsum op to the phi "einsum" kernel signature.
KernelSignature EinsumOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "einsum", {"Operands"}, {"equation"}, {"Out", "InnerCache", "XShape"});
}
KernelSignature EinsumGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature EinsumGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("einsum_grad",
{"Operands", "InnerCache", "Out@GRAD"},
{"equation"},
......
......@@ -26,7 +26,7 @@ KernelSignature ElementwiseAddOpArgumentMapping(
}
// Maps the legacy grad_add op to the phi "grad_add" kernel signature.
KernelSignature ElementwiseGradAddOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("grad_add", {"X", "Y"}, {}, {"Out"});
}
......@@ -101,12 +101,12 @@ KernelSignature ElementwiseFloorDivOpArgumentMapping(
}
// Maps the legacy elementwise_heaviside op to the phi "heaviside" kernel.
KernelSignature ElementwiseHeavisideOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("heaviside", {"X", "Y"}, {}, {"Out"});
}
KernelSignature ElementwisePowOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
int axis = paddle::any_cast<int>(ctx.Attr("axis"));
if (axis == -1) {
return KernelSignature("elementwise_pow", {"X", "Y"}, {}, {"Out"});
......@@ -115,19 +115,19 @@ KernelSignature ElementwisePowOpArgumentMapping(
}
// Maps the legacy elementwise_add grad op to the phi "add_grad" kernel.
KernelSignature ElementwiseAddGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "add_grad", {"X", "Y", "Out@GRAD"}, {"axis"}, {"X@GRAD", "Y@GRAD"});
}
// Maps the legacy elementwise_add double-grad op to "add_double_grad".
KernelSignature ElementwiseAddDoubleGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "add_double_grad", {"Y", "DOut", "DDX", "DDY"}, {"axis"}, {"DDOut"});
}
KernelSignature ElementwiseAddTripleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("add_triple_grad",
{"DDX", "DDY", "D_DDOut"},
{"axis"},
......@@ -135,19 +135,19 @@ KernelSignature ElementwiseAddTripleGradOpArgumentMapping(
}
// Maps the legacy elementwise_sub grad op to the phi "subtract_grad" kernel.
KernelSignature ElementwiseSubGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "subtract_grad", {"X", "Y", "Out@GRAD"}, {"axis"}, {"X@GRAD", "Y@GRAD"});
}
// Maps the legacy elementwise_sub double-grad op to "subtract_double_grad".
KernelSignature ElementwiseSubDoubleGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "subtract_double_grad", {"Y", "DOut", "DDX", "DDY"}, {"axis"}, {"DDOut"});
}
KernelSignature ElementwiseDivGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("divide_grad",
{"X", "Y", "Out", "Out@GRAD"},
{"axis"},
......@@ -155,13 +155,13 @@ KernelSignature ElementwiseDivGradOpArgumentMapping(
}
// Maps the legacy elementwise_fmin grad op to the phi "fmin_grad" kernel.
KernelSignature ElementwiseFMinGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "fmin_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
}
KernelSignature ElementwiseDivDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("divide_double_grad",
{"Y", "Out", "DX", "DDX", "DDY"},
{"axis"},
......@@ -169,29 +169,29 @@ KernelSignature ElementwiseDivDoubleGradOpArgumentMapping(
}
// Maps the legacy elementwise_mul grad op to the phi "multiply_grad" kernel.
KernelSignature ElementwiseMulGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "multiply_grad", {"X", "Y", "Out@GRAD"}, {"axis"}, {"X@GRAD", "Y@GRAD"});
}
// Maps the legacy elementwise_fmax op to the phi "fmax" kernel signature.
KernelSignature ElementwiseFMaxOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("fmax", {"X", "Y"}, {}, {"Out"});
}
// Maps the legacy elementwise_fmin op to the phi "fmin" kernel signature.
KernelSignature ElementwiseFMinOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("fmin", {"X", "Y"}, {}, {"Out"});
}
// Maps the legacy elementwise_fmax grad op to the phi "fmax_grad" kernel.
KernelSignature ElementwiseFMaxGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "fmax_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
}
KernelSignature ElementwiseMulDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("multiply_double_grad",
{"X", "Y", "DOut", "DDX", "DDY"},
{"axis"},
......@@ -199,7 +199,7 @@ KernelSignature ElementwiseMulDoubleGradOpArgumentMapping(
}
KernelSignature ElementwiseMulTripleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"multiply_triple_grad",
{"X", "Y", "DOut", "DDX", "DDY", "D_DX", "D_DY", "D_DDOut"},
......@@ -208,25 +208,25 @@ KernelSignature ElementwiseMulTripleGradOpArgumentMapping(
}
// Maps the legacy elementwise_max grad op to the phi "maximum_grad" kernel.
KernelSignature ElementwiseMaxGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "maximum_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
}
// Maps the legacy elementwise_min grad op to the phi "minimum_grad" kernel.
KernelSignature ElementwiseMinGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "minimum_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
}
// Maps the legacy elementwise_heaviside grad op to "heaviside_grad".
KernelSignature ElementwiseHeavisideGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "heaviside_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
}
// Maps the legacy elementwise_pow grad op to "elementwise_pow_grad".
KernelSignature ElementwisePowGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "elementwise_pow_grad", {"X", "Y", "Out@GRAD"}, {}, {"X@GRAD", "Y@GRAD"});
}
......
......@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi {
// Maps the legacy exponential op to the phi "exponential" kernel signature.
KernelSignature ExponentialOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("exponential", {"X"}, {"lambda"}, {"Out"});
}
......
......@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi {
// Maps the legacy fill_any_like op to the phi "full_like" kernel signature.
KernelSignature FillAnyLikeOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("full_like", {"X"}, {"value", "dtype"}, {"Out"});
}
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register or sign in.