未验证 提交 6179a3ec 编写于 作者: G Galaxy1458 提交者: GitHub

test,test=develop (#53931)

上级 63ffd733
......@@ -16,12 +16,14 @@
namespace phi {
KernelSignature NormOpArgumentMapping(const ArgumentMappingContext& ctx) {
// Maps the fluid "norm" operator onto the phi "norm" kernel signature:
// input X; attributes axis, epsilon, is_test; outputs Out and Norm.
// The mapping is unconditional, so ctx is never read; it is marked UNUSED
// (presumably expands to an unused-parameter attribute — confirm macro def).
KernelSignature NormOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"norm", {"X"}, {"axis", "epsilon", "is_test"}, {"Out", "Norm"});
}
KernelSignature NormGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature NormGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("norm_grad",
{"X", "Norm", "Out@GRAD"},
{"axis", "epsilon", "is_test"},
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature PSendOpArgumentMapping(const ArgumentMappingContext& ctx) {
// Maps the "p_send" operator onto the phi "p_send" kernel signature:
// input x; attributes peer and dynamic_shape; no outputs.
// ctx is unused because the signature does not depend on runtime context.
KernelSignature PSendOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("p_send", {"x"}, {"peer", "dynamic_shape"}, {});
}
......
......@@ -17,7 +17,8 @@
namespace phi {
KernelSignature PadGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
// Maps the "pad" operator's gradient onto the phi "pad_grad" kernel:
// input Out@GRAD; attributes paddings and pad_value; output X@GRAD.
// ctx is unused — the mapping is fixed regardless of context.
KernelSignature PadGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"pad_grad", {"Out@GRAD"}, {"paddings", "pad_value"}, {"X@GRAD"});
}
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature PixelUnshuffleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("pixel_unshuffle_grad",
{"Out@GRAD"},
{"downscale_factor", "data_format"},
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature Pool2dOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Pool2dOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("pool2d",
{"X"},
{"ksize",
......@@ -32,7 +33,8 @@ KernelSignature Pool2dOpArgumentMapping(const ArgumentMappingContext& ctx) {
{"Out"});
}
KernelSignature Pool2dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Pool2dGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("pool2d_grad",
{"X", "Out", "Out@GRAD"},
{"ksize",
......@@ -49,7 +51,7 @@ KernelSignature Pool2dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature Pool2dDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("pool2d_double_grad",
{"X"},
{"ksize",
......@@ -65,7 +67,8 @@ KernelSignature Pool2dDoubleGradOpArgumentMapping(
{"Out"});
}
KernelSignature Pool3dOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Pool3dOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("pool3d",
{"X"},
{"ksize",
......@@ -81,7 +84,8 @@ KernelSignature Pool3dOpArgumentMapping(const ArgumentMappingContext& ctx) {
{"Out"});
}
KernelSignature Pool3dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature Pool3dGradOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("pool3d_grad",
{"X", "Out", "Out@GRAD"},
{"ksize",
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature Pow2DecayWithLinearWarmupOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("pow2_decay_with_linear_warmup",
{"LearningRate", "Step"},
{"warmup_steps", "total_steps", "base_lr", "end_lr"},
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature PriorBoxOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature PriorBoxOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("prior_box",
{"Input", "Image"},
{"min_sizes",
......
......@@ -25,7 +25,7 @@ KernelSignature PsroiPoolOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature PsroiPoolGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"psroi_pool_grad",
{"X", "ROIs", "RoisNum", "Out@GRAD"},
......
......@@ -160,7 +160,7 @@ KernelSignature ReduceAllOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature ReduceSumGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("sum_grad",
{"X", "Out@GRAD"},
{"dim", "keep_dim", "reduce_all"},
......@@ -168,7 +168,7 @@ KernelSignature ReduceSumGradOpArgumentMapping(
}
KernelSignature ReduceMeanGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("mean_grad",
{"X", "Out@GRAD"},
{"dim", "keep_dim", "reduce_all"},
......@@ -176,7 +176,7 @@ KernelSignature ReduceMeanGradOpArgumentMapping(
}
KernelSignature ReduceMaxGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("max_grad",
{"X", "Out", "Out@GRAD"},
{"dim", "keep_dim", "reduce_all"},
......@@ -184,7 +184,7 @@ KernelSignature ReduceMaxGradOpArgumentMapping(
}
KernelSignature ReduceAMaxGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("amax_grad",
{"X", "Out", "Out@GRAD"},
{"dim", "keep_dim", "reduce_all"},
......@@ -192,7 +192,7 @@ KernelSignature ReduceAMaxGradOpArgumentMapping(
}
KernelSignature ReduceMinGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("min_grad",
{"X", "Out", "Out@GRAD"},
{"dim", "keep_dim", "reduce_all"},
......@@ -200,7 +200,7 @@ KernelSignature ReduceMinGradOpArgumentMapping(
}
KernelSignature ReduceAMinGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("amin_grad",
{"X", "Out", "Out@GRAD"},
{"dim", "keep_dim", "reduce_all"},
......@@ -208,7 +208,7 @@ KernelSignature ReduceAMinGradOpArgumentMapping(
}
KernelSignature ReduceProdGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("prod_grad",
{"X", "Out", "Out@GRAD"},
{"dim", "keep_dim", "reduce_all"},
......
......@@ -38,12 +38,12 @@ KernelSignature ReshapeOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature ReshapeGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("reshape_grad", {"Out@GRAD"}, {}, {"X@GRAD"});
}
KernelSignature ReshapeDoubleGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("reshape_double_grad", {"DOut", "DDX"}, {}, {"DDOut"});
}
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature RoiAlignOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature RoiAlignOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("roi_align",
{"X", "ROIs", "RoisNum"},
{"pooled_height",
......@@ -28,7 +29,7 @@ KernelSignature RoiAlignOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature RoiAlignGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("roi_align_grad",
{"X", "ROIs", "RoisNum", "Out@GRAD"},
{"pooled_height",
......
......@@ -16,7 +16,8 @@
namespace phi {
KernelSignature RoiPoolOpArgumentMapping(const ArgumentMappingContext& ctx) {
KernelSignature RoiPoolOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("roi_pool",
{"X", "ROIs", "RoisNum"},
{"pooled_height", "pooled_width", "spatial_scale"},
......@@ -24,7 +25,7 @@ KernelSignature RoiPoolOpArgumentMapping(const ArgumentMappingContext& ctx) {
}
KernelSignature RoiPoolOpGradArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("roi_pool_grad",
{"X", "ROIs", "RoisNum", "Argmax", "Out@GRAD"},
{"pooled_height", "pooled_width", "spatial_scale"},
......
......@@ -16,13 +16,14 @@
namespace phi {
KernelSignature RReluOpArgumentMapping(const ArgumentMappingContext& ctx) {
// Maps the "rrelu" (randomized leaky ReLU) operator onto the phi "rrelu"
// kernel: input X; attributes lower, upper, is_test; outputs Out and Noise.
// ctx is unused — the signature is unconditional.
KernelSignature RReluOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"rrelu", {"X"}, {"lower", "upper", "is_test"}, {"Out", "Noise"});
}
KernelSignature RReluGradGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"rrelu_grad", {"X", "Noise", "Out@GRAD"}, {}, {"X@GRAD"});
}
......
......@@ -14,7 +14,7 @@ limitations under the License. */
namespace phi {
KernelSignature SequenceMaskOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"sequence_mask", {"X", "MaxLenTensor"}, {"maxlen", "out_dtype"}, {"Y"});
}
......
......@@ -14,7 +14,7 @@ limitations under the License. */
namespace phi {
KernelSignature SequencePoolOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("sequence_pool",
{"X"},
{"is_test", "pooltype", "pad_value"},
......@@ -22,7 +22,7 @@ KernelSignature SequencePoolOpArgumentMapping(
}
KernelSignature SequencePoolGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("sequence_pool_grad",
{"X", "MaxIndex", "Out@GRAD"},
{"is_test", "pooltype", "pad_value"},
......
......@@ -17,7 +17,7 @@
namespace phi {
KernelSignature SyncBatchNormOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("sync_batch_norm",
{"X", "Mean", "Variance", "Scale", "Bias"},
{"is_test",
......@@ -35,7 +35,7 @@ KernelSignature SyncBatchNormOpArgumentMapping(
}
KernelSignature SyncBatchNormGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("sync_batch_norm_grad",
{
"X",
......
......@@ -16,12 +16,13 @@
namespace phi {
KernelSignature TransposeOpArgumentMapping(const ArgumentMappingContext& ctx) {
// Maps the "transpose" operator onto the phi "transpose" kernel:
// input X; attribute axis (the permutation); output Out.
// ctx is unused — the mapping never branches on context.
KernelSignature TransposeOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("transpose", {"X"}, {"axis"}, {"Out"});
}
KernelSignature TransposeGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("transpose_grad", {"Out@GRAD"}, {"axis"}, {"X@GRAD"});
}
......
......@@ -16,12 +16,13 @@ limitations under the License. */
namespace phi {
KernelSignature TrilTriuOpArgumentMapping(const ArgumentMappingContext& ctx) {
// Maps the "tril_triu" operator onto the phi "tril_triu" kernel:
// input X; attributes diagonal and lower (tril vs. triu selector —
// presumably; verify against the kernel definition); output Out.
// ctx is unused — the signature is fixed.
KernelSignature TrilTriuOpArgumentMapping(
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature("tril_triu", {"X"}, {"diagonal", "lower"}, {"Out"});
}
KernelSignature TrilTriuGradOpArgumentMapping(
const ArgumentMappingContext& ctx) {
const ArgumentMappingContext& ctx UNUSED) {
return KernelSignature(
"tril_triu_grad", {"Out@GRAD"}, {"diagonal", "lower"}, {"X@GRAD"});
}
......
......@@ -18,7 +18,7 @@
#include "paddle/phi/core/kernel_registry.h"
#include "paddle/phi/kernels/declarations.h"
int main(int argc, char** argv) {
// Utility entry point: streams the phi KernelFactory singleton to stdout
// (i.e. dumps the registered kernel declarations) and exits successfully.
// Both argc and argv are ignored, hence the UNUSED annotations.
int main(int argc UNUSED, char** argv UNUSED) {
std::cout << phi::KernelFactory::Instance() << std::endl;
return 0;
}
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册