未验证 提交 3ad67b9a 编写于 作者: G Galaxy1458 提交者: GitHub

test,test=develop (#53938)

上级 706503d0
......@@ -15,11 +15,13 @@
#include "paddle/phi/core/compat/op_utils.h"
namespace phi {
// Maps the legacy `fill` op to the phi "fill" kernel signature:
// input {X}, attribute {value_float}, output {Out}.
// The mapping is unconditional, so `ctx` is never read — marked UNUSED
// to silence -Wunused-parameter. (The scraped diff had both the old and
// new signature lines; only the post-change one is kept here.)
KernelSignature FillOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("fill", {"X"}, {"value_float"}, {"Out"});
}
// Maps the legacy `fill_grad` op to the phi "fill_grad" kernel signature:
// input {Out@GRAD}, attribute {value_float}, output {X@GRAD}.
// `ctx` is never inspected (single unconditional mapping), hence UNUSED.
KernelSignature FillGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "fill_grad", {"Out@GRAD"}, {"value_float"}, {"X@GRAD"});
}
......
......@@ -31,7 +31,7 @@ KernelSignature FlattenOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
// Maps the legacy `flatten_grad` op to the phi "flatten_grad" kernel
// signature: inputs {XShape, Out@GRAD}, no attributes, output {X@GRAD}.
// `ctx` is unused (unconditional mapping); the duplicated pre-change
// parameter line from the rendered diff is dropped.
KernelSignature FlattenGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "flatten_grad", {"XShape", "Out@GRAD"}, {}, {"X@GRAD"});
}
......
......@@ -18,7 +18,8 @@
namespace phi {
KernelSignature FusedAdamOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature FusedAdamOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
paddle::small_vector<const char*> in_names = {"Params",
"Grads",
"LearningRate",
......
......@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi {
KernelSignature AttentionFuseOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_attention",
{"X",
"LnScale",
......@@ -59,7 +59,7 @@ KernelSignature AttentionFuseOpArgumentMapping(
}
KernelSignature AttentionGradFuseOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_attention_grad",
{"Y@GRAD",
"X",
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature FusedConv2dOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_conv2d",
{"Input", "Filter", "Bias", "ResidualData"},
{"strides",
......@@ -34,7 +34,7 @@ KernelSignature FusedConv2dOpArgumentMapping(
}
KernelSignature FusedConv3dOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_conv3d",
{"Input", "Filter", "Bias", "ResidualData"},
{"strides",
......
......@@ -49,7 +49,7 @@ KernelSignature FusedElementwiseSubOpArgumentMapping(
}
KernelSignature FusedElementwiseMulOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_elementwise_mul",
{"X", "Y"},
{"axis",
......@@ -65,7 +65,7 @@ KernelSignature FusedElementwiseMulOpArgumentMapping(
}
KernelSignature FusedElementwiseDivOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_elementwise_div",
{"X", "Y"},
{"axis",
......
......@@ -14,7 +14,7 @@ limitations under the License. */
namespace phi {
KernelSignature FeedForwardFuseOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_feedforward",
{"X",
"Dropout1Seed",
......@@ -56,7 +56,7 @@ KernelSignature FeedForwardFuseOpArgumentMapping(
}
KernelSignature FeedForwardGradFuseOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_feedforward_grad",
{"Out@GRAD", "X",
"Linear1Weight", "Linear1Bias",
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature FusedMatmulOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_matmul",
{"X", "Y", "ResidualData"},
{"trans_x",
......
......@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi {
// Maps the legacy fused softmax-mask grad op to the phi
// "fused_softmax_mask_grad" kernel signature: inputs {Softmax, Out@GRAD},
// no attributes, output {X@GRAD}. `ctx` is never read, hence UNUSED;
// the stale pre-change parameter line from the rendered diff is removed.
KernelSignature SoftmaxMaskFuseGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "fused_softmax_mask_grad", {"Softmax", "Out@GRAD"}, {}, {"X@GRAD"});
}
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature FusedSoftplusOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"fused_softplus",
{"X"},
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature FusedTransposeOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_transpose",
{"X"},
{"axis",
......
......@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi {
KernelSignature GraphSampleNeighborsOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("graph_sample_neighbors",
{"Row", "Col_Ptr", "X", "Eids", "Perm_Buffer"},
{"sample_size", "return_eids", "flag_perm_buffer"},
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature GroupNormOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature GroupNormOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("group_norm",
{"X", "Scale", "Bias"},
{"epsilon", "groups", "data_layout"},
......@@ -24,7 +25,7 @@ KernelSignature GroupNormOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature GroupNormGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"group_norm_grad",
{"X", "Scale", "Bias", "Y", "Mean", "Variance", "Y@GRAD"},
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature HierarchicalSigmoidOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("hsigmoid_loss",
{"X", "Label", "W", "Bias", "PathTable", "PathCode"},
{"num_classes", "is_sparse"},
......
......@@ -17,12 +17,12 @@
namespace phi {
// Maps the legacy `identity_loss` op to the phi "identity_loss" kernel
// signature: input {X}, attribute {reduction}, output {Out}.
// `ctx` is unused (single unconditional mapping), hence UNUSED; only the
// post-change parameter line is kept (the diff scrape duplicated it).
KernelSignature IdentityLossOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature("identity_loss", {"X"}, {"reduction"}, {"Out"});
}
// Maps the legacy `identity_loss_grad` op to the phi "identity_loss_grad"
// kernel signature: inputs {X, Out@GRAD}, attribute {reduction},
// output {X@GRAD}. `ctx` is never read, hence UNUSED; the duplicated
// pre-change parameter line from the rendered diff is dropped.
KernelSignature IdentityLossGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature(
      "identity_loss_grad", {"X", "Out@GRAD"}, {"reduction"}, {"X@GRAD"});
}
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature LogsumexpGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("logsumexp_grad",
{"X", "Out", "Out@GRAD"},
{"axis", "keepdim", "reduce_all"},
......
......@@ -31,7 +31,7 @@ KernelSignature MatmulGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature MatmulDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("matmul_double_grad",
{"X", "Y", "DOut", "DDX", "DDY"},
{"trans_x", "trans_y"},
......@@ -39,7 +39,7 @@ KernelSignature MatmulDoubleGradOpArgumentMapping(
}
KernelSignature MatmulTripleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"matmul_triple_grad",
{"X", "Y", "DOut", "DDX", "DDY", "D_DX", "D_DY", "D_DDOut"},
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature MulGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature MulGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("matmul_with_flatten_grad",
{"X", "Y", "Out@GRAD"},
{"x_num_col_dims", "y_num_col_dims"},
......@@ -24,7 +25,7 @@ KernelSignature MulGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature MulDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("matmul_with_flatten_double_grad",
{"X", "Y", "DOut", "DDX", "DDY"},
{"x_num_col_dims", "y_num_col_dims"},
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature MultiClassNMS3OpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("multiclass_nms3",
{"BBoxes", "Scores", "RoisNum"},
{"score_threshold",
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册