未验证 提交 3ad67b9a 编写于 作者: G Galaxy1458 提交者: GitHub

test,test=develop (#53938)

上级 706503d0
...@@ -15,11 +15,13 @@ ...@@ -15,11 +15,13 @@
#include "paddle/phi/core/compat/op_utils.h" #include "paddle/phi/core/compat/op_utils.h"
namespace phi { namespace phi {
KernelSignature FillOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  // Map the legacy `fill` operator onto the phi "fill" kernel:
  // tensor input X, attribute value_float, tensor output Out.
  // The mapping is unconditional, so ctx is never consulted (UNUSED).
  return KernelSignature("fill", {"X"}, {"value_float"}, {"Out"});
}
KernelSignature FillGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  // Backward mapping for `fill`: Out@GRAD together with the
  // value_float attribute produce X@GRAD via the phi "fill_grad"
  // kernel. ctx is not inspected, hence UNUSED.
  return KernelSignature(
      "fill_grad", {"Out@GRAD"}, {"value_float"}, {"X@GRAD"});
}
......
...@@ -31,7 +31,7 @@ KernelSignature FlattenOpArgumentMapping(const ArgumentMappingContext& ctx) { ...@@ -31,7 +31,7 @@ KernelSignature FlattenOpArgumentMapping(const ArgumentMappingContext& ctx) {
} }
KernelSignature FlattenGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  // Backward mapping for `flatten`: the saved XShape plus Out@GRAD
  // are fed to the phi "flatten_grad" kernel to rebuild X@GRAD; no
  // attributes are forwarded. ctx is never read (UNUSED).
  return KernelSignature(
      "flatten_grad", {"XShape", "Out@GRAD"}, {}, {"X@GRAD"});
}
......
...@@ -18,7 +18,8 @@ ...@@ -18,7 +18,8 @@
namespace phi { namespace phi {
KernelSignature FusedAdamOpArgumentMapping(const ArgumentMappingContext& ctx) { KernelSignature FusedAdamOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
paddle::small_vector<const char*> in_names = {"Params", paddle::small_vector<const char*> in_names = {"Params",
"Grads", "Grads",
"LearningRate", "LearningRate",
......
...@@ -17,7 +17,7 @@ limitations under the License. */ ...@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi { namespace phi {
KernelSignature AttentionFuseOpArgumentMapping( KernelSignature AttentionFuseOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_attention", return KernelSignature("fused_attention",
{"X", {"X",
"LnScale", "LnScale",
...@@ -59,7 +59,7 @@ KernelSignature AttentionFuseOpArgumentMapping( ...@@ -59,7 +59,7 @@ KernelSignature AttentionFuseOpArgumentMapping(
} }
KernelSignature AttentionGradFuseOpArgumentMapping( KernelSignature AttentionGradFuseOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_attention_grad", return KernelSignature("fused_attention_grad",
{"Y@GRAD", {"Y@GRAD",
"X", "X",
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
namespace phi { namespace phi {
KernelSignature FusedConv2dOpArgumentMapping( KernelSignature FusedConv2dOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_conv2d", return KernelSignature("fused_conv2d",
{"Input", "Filter", "Bias", "ResidualData"}, {"Input", "Filter", "Bias", "ResidualData"},
{"strides", {"strides",
...@@ -34,7 +34,7 @@ KernelSignature FusedConv2dOpArgumentMapping( ...@@ -34,7 +34,7 @@ KernelSignature FusedConv2dOpArgumentMapping(
} }
KernelSignature FusedConv3dOpArgumentMapping( KernelSignature FusedConv3dOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_conv3d", return KernelSignature("fused_conv3d",
{"Input", "Filter", "Bias", "ResidualData"}, {"Input", "Filter", "Bias", "ResidualData"},
{"strides", {"strides",
......
...@@ -49,7 +49,7 @@ KernelSignature FusedElementwiseSubOpArgumentMapping( ...@@ -49,7 +49,7 @@ KernelSignature FusedElementwiseSubOpArgumentMapping(
} }
KernelSignature FusedElementwiseMulOpArgumentMapping( KernelSignature FusedElementwiseMulOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_elementwise_mul", return KernelSignature("fused_elementwise_mul",
{"X", "Y"}, {"X", "Y"},
{"axis", {"axis",
...@@ -65,7 +65,7 @@ KernelSignature FusedElementwiseMulOpArgumentMapping( ...@@ -65,7 +65,7 @@ KernelSignature FusedElementwiseMulOpArgumentMapping(
} }
KernelSignature FusedElementwiseDivOpArgumentMapping( KernelSignature FusedElementwiseDivOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_elementwise_div", return KernelSignature("fused_elementwise_div",
{"X", "Y"}, {"X", "Y"},
{"axis", {"axis",
......
...@@ -14,7 +14,7 @@ limitations under the License. */ ...@@ -14,7 +14,7 @@ limitations under the License. */
namespace phi { namespace phi {
KernelSignature FeedForwardFuseOpArgumentMapping( KernelSignature FeedForwardFuseOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_feedforward", return KernelSignature("fused_feedforward",
{"X", {"X",
"Dropout1Seed", "Dropout1Seed",
...@@ -56,7 +56,7 @@ KernelSignature FeedForwardFuseOpArgumentMapping( ...@@ -56,7 +56,7 @@ KernelSignature FeedForwardFuseOpArgumentMapping(
} }
KernelSignature FeedForwardGradFuseOpArgumentMapping( KernelSignature FeedForwardGradFuseOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_feedforward_grad", return KernelSignature("fused_feedforward_grad",
{"Out@GRAD", "X", {"Out@GRAD", "X",
"Linear1Weight", "Linear1Bias", "Linear1Weight", "Linear1Bias",
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
namespace phi { namespace phi {
KernelSignature FusedMatmulOpArgumentMapping( KernelSignature FusedMatmulOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_matmul", return KernelSignature("fused_matmul",
{"X", "Y", "ResidualData"}, {"X", "Y", "ResidualData"},
{"trans_x", {"trans_x",
......
...@@ -17,7 +17,7 @@ limitations under the License. */ ...@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi { namespace phi {
KernelSignature SoftmaxMaskFuseGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  // Backward mapping: the forward Softmax result and Out@GRAD map to
  // the phi "fused_softmax_mask_grad" kernel producing X@GRAD, with
  // no attributes. ctx is unused by this unconditional mapping.
  return KernelSignature(
      "fused_softmax_mask_grad", {"Softmax", "Out@GRAD"}, {}, {"X@GRAD"});
}
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
namespace phi { namespace phi {
KernelSignature FusedSoftplusOpArgumentMapping( KernelSignature FusedSoftplusOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature( return KernelSignature(
"fused_softplus", "fused_softplus",
{"X"}, {"X"},
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
namespace phi { namespace phi {
KernelSignature FusedTransposeOpArgumentMapping( KernelSignature FusedTransposeOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("fused_transpose", return KernelSignature("fused_transpose",
{"X"}, {"X"},
{"axis", {"axis",
......
...@@ -17,7 +17,7 @@ limitations under the License. */ ...@@ -17,7 +17,7 @@ limitations under the License. */
namespace phi { namespace phi {
KernelSignature GraphSampleNeighborsOpArgumentMapping( KernelSignature GraphSampleNeighborsOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("graph_sample_neighbors", return KernelSignature("graph_sample_neighbors",
{"Row", "Col_Ptr", "X", "Eids", "Perm_Buffer"}, {"Row", "Col_Ptr", "X", "Eids", "Perm_Buffer"},
{"sample_size", "return_eids", "flag_perm_buffer"}, {"sample_size", "return_eids", "flag_perm_buffer"},
......
...@@ -16,7 +16,8 @@ ...@@ -16,7 +16,8 @@
namespace phi { namespace phi {
KernelSignature GroupNormOpArgumentMapping(const ArgumentMappingContext& ctx) { KernelSignature GroupNormOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("group_norm", return KernelSignature("group_norm",
{"X", "Scale", "Bias"}, {"X", "Scale", "Bias"},
{"epsilon", "groups", "data_layout"}, {"epsilon", "groups", "data_layout"},
...@@ -24,7 +25,7 @@ KernelSignature GroupNormOpArgumentMapping(const ArgumentMappingContext& ctx) { ...@@ -24,7 +25,7 @@ KernelSignature GroupNormOpArgumentMapping(const ArgumentMappingContext& ctx) {
} }
KernelSignature GroupNormGradOpArgumentMapping( KernelSignature GroupNormGradOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature( return KernelSignature(
"group_norm_grad", "group_norm_grad",
{"X", "Scale", "Bias", "Y", "Mean", "Variance", "Y@GRAD"}, {"X", "Scale", "Bias", "Y", "Mean", "Variance", "Y@GRAD"},
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
namespace phi { namespace phi {
KernelSignature HierarchicalSigmoidOpArgumentMapping( KernelSignature HierarchicalSigmoidOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("hsigmoid_loss", return KernelSignature("hsigmoid_loss",
{"X", "Label", "W", "Bias", "PathTable", "PathCode"}, {"X", "Label", "W", "Bias", "PathTable", "PathCode"},
{"num_classes", "is_sparse"}, {"num_classes", "is_sparse"},
......
...@@ -17,12 +17,12 @@ ...@@ -17,12 +17,12 @@
namespace phi { namespace phi {
KernelSignature IdentityLossOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  // Map `identity_loss`: input X and the `reduction` attribute are
  // forwarded to the phi kernel of the same name, yielding Out.
  // ctx is not examined (UNUSED).
  return KernelSignature("identity_loss", {"X"}, {"reduction"}, {"Out"});
}
KernelSignature IdentityLossGradOpArgumentMapping(
    const ArgumentMappingContext& ctx UNUSED) {
  // Backward mapping for `identity_loss`: X and Out@GRAD plus the
  // `reduction` attribute feed the phi "identity_loss_grad" kernel to
  // produce X@GRAD. ctx is never consulted, hence UNUSED.
  return KernelSignature(
      "identity_loss_grad", {"X", "Out@GRAD"}, {"reduction"}, {"X@GRAD"});
}
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
namespace phi { namespace phi {
KernelSignature LogsumexpGradOpArgumentMapping( KernelSignature LogsumexpGradOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("logsumexp_grad", return KernelSignature("logsumexp_grad",
{"X", "Out", "Out@GRAD"}, {"X", "Out", "Out@GRAD"},
{"axis", "keepdim", "reduce_all"}, {"axis", "keepdim", "reduce_all"},
......
...@@ -31,7 +31,7 @@ KernelSignature MatmulGradOpArgumentMapping(const ArgumentMappingContext& ctx) { ...@@ -31,7 +31,7 @@ KernelSignature MatmulGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
} }
KernelSignature MatmulDoubleGradOpArgumentMapping( KernelSignature MatmulDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("matmul_double_grad", return KernelSignature("matmul_double_grad",
{"X", "Y", "DOut", "DDX", "DDY"}, {"X", "Y", "DOut", "DDX", "DDY"},
{"trans_x", "trans_y"}, {"trans_x", "trans_y"},
...@@ -39,7 +39,7 @@ KernelSignature MatmulDoubleGradOpArgumentMapping( ...@@ -39,7 +39,7 @@ KernelSignature MatmulDoubleGradOpArgumentMapping(
} }
KernelSignature MatmulTripleGradOpArgumentMapping( KernelSignature MatmulTripleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature( return KernelSignature(
"matmul_triple_grad", "matmul_triple_grad",
{"X", "Y", "DOut", "DDX", "DDY", "D_DX", "D_DY", "D_DDOut"}, {"X", "Y", "DOut", "DDX", "DDY", "D_DX", "D_DY", "D_DDOut"},
......
...@@ -16,7 +16,8 @@ ...@@ -16,7 +16,8 @@
namespace phi { namespace phi {
KernelSignature MulGradOpArgumentMapping(const ArgumentMappingContext& ctx) { KernelSignature MulGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("matmul_with_flatten_grad", return KernelSignature("matmul_with_flatten_grad",
{"X", "Y", "Out@GRAD"}, {"X", "Y", "Out@GRAD"},
{"x_num_col_dims", "y_num_col_dims"}, {"x_num_col_dims", "y_num_col_dims"},
...@@ -24,7 +25,7 @@ KernelSignature MulGradOpArgumentMapping(const ArgumentMappingContext& ctx) { ...@@ -24,7 +25,7 @@ KernelSignature MulGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
} }
KernelSignature MulDoubleGradOpArgumentMapping( KernelSignature MulDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("matmul_with_flatten_double_grad", return KernelSignature("matmul_with_flatten_double_grad",
{"X", "Y", "DOut", "DDX", "DDY"}, {"X", "Y", "DOut", "DDX", "DDY"},
{"x_num_col_dims", "y_num_col_dims"}, {"x_num_col_dims", "y_num_col_dims"},
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
namespace phi { namespace phi {
KernelSignature MultiClassNMS3OpArgumentMapping( KernelSignature MultiClassNMS3OpArgumentMapping(
const ArgumentMappingContext& ctx) { const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("multiclass_nms3", return KernelSignature("multiclass_nms3",
{"BBoxes", "Scores", "RoisNum"}, {"BBoxes", "Scores", "RoisNum"},
{"score_threshold", {"score_threshold",
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册