/* Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

// See Note [ Why still include the fluid headers? ]
#include "paddle/phi/common/int_array.h"
#include "paddle/phi/common/scalar.h"
#include "paddle/phi/core/meta_tensor.h"

namespace phi {

class MetaConfig;

// Common InferMeta functions for unary operators. They follow the format:
//
//   void [FunctionDesc|OpName]InferMeta(const MetaTensor& x, ..., MetaTensor*
//   out) {}
//
// NOTE: The name "InferShape" would not be appropriate here; "InferMeta" is a
// better fit, because the functions in this file not only infer shapes, but
// also need to infer lod and other useful metadata.
//
// The InferMeta functions in this file are arranged in alphabetical order.
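//
// For illustration only, a minimal sketch of what such a function can look
// like (assuming MetaTensor's set_dims/set_dtype/set_layout/share_lod
// setters; the actual implementations live in the corresponding source file):
//
//   void UnchangedInferMeta(const MetaTensor& x, MetaTensor* out) {
//     // Propagate the input's meta information to the output unchanged.
//     out->set_dims(x.dims());
//     out->set_dtype(x.dtype());
//     out->set_layout(x.layout());
//     out->share_lod(x);
//   }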

void AffineGridInferMeta(const MetaTensor& input,
                         const IntArray& outputShape,
                         bool align_corners,
                         MetaTensor* output);

void ArgMinMaxInferMeta(const MetaTensor& x,
                        const Scalar& axis,
                        bool keepdims,
                        bool flatten,
                        int dtype,
                        MetaTensor* out,
                        MetaConfig config = MetaConfig());

void ArgsortInferMeta(const MetaTensor& input,
                      int axis,
                      bool descending,
                      MetaTensor* output,
                      MetaTensor* indices);

void AsRealInferMeta(const MetaTensor& input, MetaTensor* output);

void AsComplexInferMeta(const MetaTensor& input, MetaTensor* output);

void BatchSizeLikeInferMeta(const MetaTensor& x,
                            const std::vector<int>& shape,
                            int x_batch_size_dim,
                            int out_batch_size_dim,
                            MetaTensor* out);

void CastInferMeta(const MetaTensor& x, DataType out_dtype, MetaTensor* out);

void ChannelShuffleInferMeta(const MetaTensor& x,
                             int groups,
                             const std::string& data_format,
                             MetaTensor* out);

void CholeskyInferMeta(const MetaTensor& x, bool upper, MetaTensor* out);

void ClassCenterSampleInferMeta(const MetaTensor& label,
                                int num_classes,
                                int num_samples,
                                int ring_id,
                                int rank,
                                int nranks,
                                bool fix_seed,
                                int seed,
                                MetaTensor* remapped_label,
                                MetaTensor* sampled_local_class_center);

void ClipByNormInferMeta(const MetaTensor& x, float max_norm, MetaTensor* out);

void CreateLikeInferMeta(const MetaTensor& x, DataType dtype, MetaTensor* out);

void CropInferMeta(const MetaTensor& x,
                   const IntArray& shape,
                   const IntArray& offsets,
                   MetaTensor* out,
                   MetaConfig config = MetaConfig());

void CumInferMeta(const MetaTensor& x,
                  int axis,
                  bool flatten,
                  bool exclusive,
                  bool reverse,
                  MetaTensor* out);

void CumScalarAxisInferMeta(const MetaTensor& x,
                            const Scalar& axis,
                            bool flatten,
                            bool exclusive,
                            bool reverse,
                            MetaTensor* out);

void DecodeJpegInferMeta(const MetaTensor& x,
                         const std::string& mode,
                         MetaTensor* out);

void DiagEmbedInferMeta(
    const MetaTensor& x, int offset, int dim1, int dim2, MetaTensor* out);

void DiagInferMeta(const MetaTensor& x,
                   int offset,
                   float padding_value,
                   MetaTensor* out);

void DiagonalInferMeta(
    const MetaTensor& input, int offset, int axis1, int axis2, MetaTensor* out);

void DirichletInferMeta(const MetaTensor& alpha, MetaTensor* out);

void EigInferMeta(const MetaTensor& x, MetaTensor* out_w, MetaTensor* out_v);

void EighInferMeta(const MetaTensor& x,
                   const std::string& uplo,
                   MetaTensor* out_w,
                   MetaTensor* out_v);

void EigvalsInferMeta(const MetaTensor& x,
                      MetaTensor* out,
                      MetaConfig config = MetaConfig());

void EigvalshInferMeta(const MetaTensor& x,
                       const std::string& uplo,
                       bool is_test,
                       MetaTensor* out_w,
                       MetaTensor* out_v);

void EinsumInferMeta(const std::vector<const MetaTensor*>& inputs,
                     const std::string& equation,
                     MetaTensor* out);

void EinsumRawInferMeta(const std::vector<const MetaTensor*>& inputs,
                        const std::string& equation,
                        MetaTensor* out,
                        std::vector<MetaTensor*> inner_cache,
                        std::vector<MetaTensor*> xshape);

void ExpandInferMeta(const MetaTensor& x,
                     const IntArray& shape,
                     MetaTensor* out);

void FillDiagonalInferMeta(
    const MetaTensor& x, float value, int offset, bool wrap, MetaTensor* out);

void FFTC2CInferMeta(const MetaTensor& x,
                     const std::vector<int64_t>& axes,
                     const std::string& normalization,
                     bool forward,
                     MetaTensor* out,
                     MetaConfig = MetaConfig());

void FFTC2RInferMeta(const MetaTensor& x,
                     const std::vector<int64_t>& axes,
                     const std::string& normalization,
                     bool forward,
                     int64_t last_dim_size,
                     MetaTensor* out,
                     MetaConfig = MetaConfig());

void FFTR2CInferMeta(const MetaTensor& x,
                     const std::vector<int64_t>& axes,
                     const std::string& normalization,
                     bool forward,
                     bool onesided,
                     MetaTensor* out,
                     MetaConfig = MetaConfig());

void FlattenInferMeta(const MetaTensor& x,
                      int start_axis,
                      int stop_axis,
                      MetaTensor* out);

void FlattenWithXShapeInferMeta(const MetaTensor& x,
                                int start_axis,
                                int stop_axis,
                                MetaTensor* out,
                                MetaTensor* xshape);

void FlipInferMeta(const MetaTensor& x,
                   const std::vector<int>& axis,
                   MetaTensor* out);

void FoldInferMeta(const MetaTensor& x,
                   const std::vector<int>& output_sizes,
                   const std::vector<int>& kernel_sizes,
                   const std::vector<int>& strides,
                   const std::vector<int>& paddings,
                   const std::vector<int>& dilations,
                   MetaTensor* out);

void FrameInferMeta(const MetaTensor& x,
                    int frame_length,
                    int hop_length,
                    int axis,
                    MetaTensor* out,
                    MetaConfig = MetaConfig());

void FullBatchSizeLikeInferMeta(const MetaTensor& x,
                                const std::vector<int>& shape,
                                const Scalar& val,
                                DataType dtype,
                                int x_batch_size_dim,
                                int out_batch_size_dim,
                                MetaTensor* out);

void GumbelSoftmaxInferMeta(const MetaTensor& x,
                            float temperature,
                            bool hard,
                            int axis,
                            MetaTensor* out);

void HistogramInferMeta(
    const MetaTensor& input, int64_t bins, int min, int max, MetaTensor* out);

void IdentityLossInferMeta(const MetaTensor& x, int reduction, MetaTensor* out);

void IncrementInferMeta(const MetaTensor& x, float value, MetaTensor* out);

void InferMetaFromVecValue(const MetaTensor& x,
                           const std::vector<int64_t>& shape,
                           MetaTensor* out);

void InverseInferMeta(const MetaTensor& x, MetaTensor* out);

void IsEmptyInferMeta(const MetaTensor& x, MetaTensor* out);

void IsfiniteInferMeta(const MetaTensor& input, MetaTensor* out);

void KthvalueInferMeta(const MetaTensor& x,
                       int k,
                       int axis,
                       bool keepdim,
                       MetaTensor* out,
                       MetaTensor* indices,
                       MetaConfig = MetaConfig());

void LogsumexpInferMeta(const MetaTensor& input,
                        const std::vector<int64_t>& axis,
                        bool keepdim,
                        bool reduce_all,
                        MetaTensor* out);

void LUInferMeta(const MetaTensor& x,
                 bool pivot,
                 MetaTensor* out,
                 MetaTensor* pivots,
                 MetaTensor* infos);

void MatrixPowerInferMeta(const MetaTensor& x, int n, MetaTensor* out);

void MatrixRankInferMeta(const MetaTensor& x,
                         bool hermitian,
                         bool use_default_tol,
                         MetaTensor* out);

void MaxOutInferMeta(const MetaTensor& x,
                     int groups,
                     int axis,
                     MetaTensor* out);

void MaxPoolWithIndexInferMeta(const MetaTensor& x,
                               const std::vector<int>& kernel_size,
                               const std::vector<int>& strides,
                               const std::vector<int>& paddings,
                               bool global_pooling,
                               bool adaptive,
                               MetaTensor* out,
                               MetaTensor* mask,
                               MetaConfig config = MetaConfig());

void MeanAllInferMeta(const MetaTensor& x, MetaTensor* out);

void ModeInferMeta(const MetaTensor& x,
                   int axis,
                   bool keepdim,
                   MetaTensor* out,
                   MetaTensor* indices);

void MultinomialInferMeta(const MetaTensor& x,
                          const Scalar& num_samples,
                          bool replacement,
                          MetaTensor* out,
                          MetaConfig config = MetaConfig());

void NanmedianInferMeta(const MetaTensor& x,
                        const IntArray& axes,
                        bool keep_dim,
                        MetaTensor* out,
                        MetaTensor* median_index);

void NonZeroInferMeta(const MetaTensor& condition, MetaTensor* out);

void NMSInferMeta(const MetaTensor& x, float threshold, MetaTensor* out);

void NormInferMeta(const MetaTensor& x,
                   int axis,
                   float epsilon,
                   bool is_test,
                   MetaTensor* out,
                   MetaTensor* norm);

void OneHotRawInferMeta(const MetaTensor& x,
                        const Scalar& depth,
                        DataType dtype,
                        bool allow_out_of_range,
                        MetaTensor* out);

void OneHotInferMeta(const MetaTensor& x, const Scalar& depth, MetaTensor* out);

void OverlapAddInferMeta(const MetaTensor& x,
                         int hop_length,
                         int axis,
                         MetaTensor* out,
                         MetaConfig config = MetaConfig());

void PadInferMeta(const MetaTensor& input,
                  const std::vector<int>& paddings,
                  const Scalar& padding_value,
                  MetaTensor* out,
                  MetaConfig config = MetaConfig());

void Pad3dInferMeta(const MetaTensor& x,
                    const IntArray& paddings,
                    const std::string& mode,
                    float value,
                    const std::string& data_format,
                    MetaTensor* out,
                    MetaConfig config = MetaConfig());

void PixelShuffleInferMeta(const MetaTensor& x,
                           int upscale_factor,
                           const std::string& data_format,
                           MetaTensor* out);

void PixelShuffleGradInferMeta(const MetaTensor& out_grad,
                               int upscale_factor,
                               const std::string& data_format,
                               MetaTensor* x_grad);

void PixelUnshuffleInferMeta(const MetaTensor& x,
                             int downscale_factor,
                             const std::string& data_format,
                             MetaTensor* out);

void PNormInferMeta(const MetaTensor& x,
                    float porder,
                    int axis,
                    float epsilon,
                    bool keepdim,
                    bool asvector,
                    MetaTensor* out);

void PoolInferMeta(const MetaTensor& x,
                   const std::vector<int>& kernel_size,
                   const std::vector<int>& strides,
                   const std::vector<int>& paddings,
                   bool ceil_mode,
                   bool exclusive,
                   const std::string& data_format,
                   const std::string& pooling_type,
                   bool global_pooling,
                   bool adaptive,
                   const std::string& padding_algorithm,
                   MetaTensor* out,
                   MetaConfig config = MetaConfig());

void Pool2DInferMeta(const MetaTensor& x,
                     const IntArray& kernel_size,
                     const std::vector<int>& strides,
                     const std::vector<int>& paddings,
                     bool ceil_mode,
                     bool exclusive,
                     const std::string& data_format,
                     const std::string& pooling_type,
                     bool global_pooling,
                     bool adaptive,
                     const std::string& padding_algorithm,
                     MetaTensor* out,
                     MetaConfig config = MetaConfig());

void QrInferMeta(const MetaTensor& x,
                 const std::string& mode,
                 MetaTensor* q,
                 MetaTensor* r);

void RealAndImagInferMeta(const MetaTensor& x, MetaTensor* out);

void ReduceInferMeta(const MetaTensor& x,
                     const std::vector<int64_t>& axis,
                     bool keep_dim,
                     MetaTensor* out);

void ReduceInferMetaBase(const MetaTensor& x,
                         const std::vector<int64_t>& axis,
                         bool keep_dim,
                         bool reduce_all,
                         MetaTensor* out);

void ReduceIntArrayAxisInferMetaBase(const MetaTensor& x,
                                     const IntArray& axis,
                                     bool keep_dim,
                                     bool reduce_all,
                                     MetaTensor* out,
                                     MetaConfig config = MetaConfig());

void ReduceIntArrayAxisInferMeta(const MetaTensor& x,
                                 const IntArray& axis,
                                 bool keep_dim,
                                 MetaTensor* out,
                                 MetaConfig config = MetaConfig());

void RepeatInterleaveInferMeta(const MetaTensor& x,
                               int repeats,
                               int dim,
                               MetaTensor* out);

void ReshapeInferMeta(const MetaTensor& x,
                      const IntArray& shape,
                      MetaTensor* out,
                      MetaConfig config = MetaConfig());

void ReshapeWithXShapeInferMeta(const MetaTensor& x,
                                const IntArray& shape,
                                MetaTensor* out,
                                MetaTensor* xshape,
                                MetaConfig config = MetaConfig());

void ReverseInferMeta(const MetaTensor& x,
                      const IntArray& axis,
                      MetaTensor* out,
                      MetaConfig config = MetaConfig());

void ReverseArrayInferMeta(const std::vector<const phi::MetaTensor*>& x,
                           const IntArray& axis,
                           std::vector<phi::MetaTensor*> out,
                           MetaConfig config = MetaConfig());

void RollInferMeta(const MetaTensor& x,
                   const IntArray& shifts,
                   const std::vector<int64_t>& axis,
                   MetaTensor* out);

void RReluInferMeta(const MetaTensor& x,
                    float lower,
                    float upper,
                    bool is_test,
                    MetaTensor* out,
                    MetaTensor* noise);

void RReluGradInferMeta(const MetaTensor& out_grad,
                        const MetaTensor& noise,
                        MetaTensor* x_grad);

void SetValueInferMeta(const MetaTensor& x, MetaTensor* out);

void ShapeInferMeta(const MetaTensor& input, MetaTensor* out);

void ShardIndexInferMeta(const MetaTensor& in,
                         int index_num,
                         int nshards,
                         int shard_id,
                         int ignore_value,
                         MetaTensor* out,
                         MetaConfig config = MetaConfig());

void NumelInferMeta(const MetaTensor& input, MetaTensor* out);

void SliceRawInferMeta(const MetaTensor& input,
                       const std::vector<int64_t>& axes,
                       const IntArray& starts,
                       const IntArray& ends,
                       const std::vector<int64_t>& infer_flags,
                       const std::vector<int64_t>& decrease_axis,
                       MetaTensor* out,
                       MetaConfig config = MetaConfig());

void SoftmaxInferMeta(const MetaTensor& x, int axis, MetaTensor* out);

int GetSplitAxisValue(const MetaTensor& x,
                      const Scalar& axis,
                      MetaConfig config);

void FillSplitOutDims(const MetaTensor& x,
                      const int axis_value,
                      const std::vector<int64_t>& sections_vec,
                      std::vector<MetaTensor*>* out);

void SplitInferMeta(const MetaTensor& x_meta,
                    const IntArray& sections,
                    const Scalar& axis,
                    std::vector<MetaTensor*> out,
                    MetaConfig config = MetaConfig());

void SplitWithNumInferMeta(const MetaTensor& x_meta,
                           int num,
                           const Scalar& axis,
                           std::vector<MetaTensor*> out,
                           MetaConfig config = MetaConfig());

void SquaredL2NormInferMeta(const MetaTensor& x, MetaTensor* out);

void SqueezeInferMeta(const MetaTensor& x,
                      const IntArray& axes,
                      MetaTensor* out,
                      MetaConfig config = MetaConfig());

void SqueezeWithXShapeInferMeta(const MetaTensor& x,
                                const IntArray& axes,
                                MetaTensor* out,
                                MetaTensor* xshape,
                                MetaConfig config = MetaConfig());

void StridedSliceRawInferMeta(const MetaTensor& x,
                              const std::vector<int>& axes,
                              const IntArray& starts,
                              const IntArray& ends,
                              const IntArray& strides,
                              const std::vector<int>& infer_flags,
                              const std::vector<int>& decrease_axis,
                              MetaTensor* out,
                              MetaConfig config = MetaConfig());

void StridedSliceInferMeta(const MetaTensor& x,
                           const std::vector<int>& axes,
                           const IntArray& starts,
                           const IntArray& ends,
                           const IntArray& strides,
                           MetaTensor* out,
                           MetaConfig config = MetaConfig());

void SumInferMeta(const MetaTensor& x,
                  const IntArray& axis,
                  DataType dtype,
                  bool keep_dim,
                  MetaTensor* out,
                  MetaConfig config = MetaConfig());

void SumRawInferMeta(const MetaTensor& x,
                     const IntArray& axis,
                     bool keep_dim,
                     bool reduce_all,
                     DataType dtype,
                     MetaTensor* out,
                     MetaConfig config = MetaConfig());

void SvdInferMeta(const MetaTensor& x,
                  bool full_matrices,
                  MetaTensor* u,
                  MetaTensor* s,
                  MetaTensor* vh);

void TemporalShiftInferMeta(const MetaTensor& x,
                            int seg_num,
                            float shift_ratio,
                            const std::string& data_format,
                            MetaTensor* out,
                            MetaConfig config = MetaConfig());

void TileInferMeta(const MetaTensor& x,
                   const IntArray& repeat_times,
                   MetaTensor* out,
                   MetaConfig config = MetaConfig());

void TopKInferMeta(const MetaTensor& x,
                   const Scalar& k_scalar,
                   int axis,
                   bool largest,
                   bool sorted,
                   MetaTensor* out,
                   MetaTensor* indices,
                   MetaConfig config = MetaConfig());

void TraceInferMeta(
    const MetaTensor& x, int offset, int axis1, int axis2, MetaTensor* out);

void TransferLayoutInferMeta(const MetaTensor& x,
                             int src_layout,
                             int dst_layout,
                             MetaTensor* out);

void TransposeInferMeta(const MetaTensor& x,
                        const std::vector<int>& axis,
                        MetaTensor* out);

void TransposeGradInferMeta(const MetaTensor& x,
                            const std::vector<int>& axis,
                            MetaTensor* out);

void TrilInferMeta(const MetaTensor& x, int diagonal, MetaTensor* out);

void TriuInferMeta(const MetaTensor& x, int diagonal, MetaTensor* out);

void TrilTriuInferMeta(const MetaTensor& x,
                       int diagonal,
                       bool lower,
                       MetaTensor* out);

void UnbindInferMeta(const MetaTensor& x,
                     int axis,
                     std::vector<MetaTensor*> outs);

void UnchangedInferMeta(const MetaTensor& x, MetaTensor* out);

// meta x -> out without change, check if axis in range [-Rank(x), Rank(x)-1]
void UnchangedInferMetaCheckAxis(const MetaTensor& x,
                                 int axis,
                                 MetaTensor* out);

void UnfoldInferMeta(const MetaTensor& x,
                     const std::vector<int>& kernel_sizes,
                     const std::vector<int>& strides,
                     const std::vector<int>& paddings,
                     const std::vector<int>& dilations,
                     MetaTensor* out,
                     MetaConfig config = MetaConfig());

void UniformRandomInplaceInferMeta(const MetaTensor& x,
                                   float min,
                                   float max,
                                   int seed,
                                   int diag_num,
                                   int diag_step,
                                   float diag_val,
                                   MetaTensor* out);

void UniqueConsecutiveInferMeta(const MetaTensor& x,
                                bool return_inverse,
                                bool return_counts,
                                const std::vector<int>& axis,
                                int dtype,
                                MetaTensor* out,
                                MetaTensor* index,
                                MetaTensor* counts);

void UniqueInferMeta(const MetaTensor& x,
                     bool return_index,
                     bool return_inverse,
                     bool return_counts,
                     const std::vector<int>& axis,
                     DataType dtype,
                     MetaTensor* out,
                     MetaTensor* indices,
                     MetaTensor* index,
                     MetaTensor* counts);

void UniqueRawInferMeta(const MetaTensor& x,
                        bool return_index,
                        bool return_inverse,
                        bool return_counts,
                        const std::vector<int>& axis,
                        DataType dtype,
                        bool is_sorted,
                        MetaTensor* out,
                        MetaTensor* indices,
                        MetaTensor* index,
                        MetaTensor* counts);

void UnsqueezeInferMeta(const MetaTensor& x,
                        const IntArray& axes,
                        MetaTensor* out,
                        MetaConfig config = MetaConfig());

void UnsqueezeWithXShapeInferMeta(const MetaTensor& x,
                                  const IntArray& axes,
                                  MetaTensor* out,
                                  MetaTensor* xshape,
                                  MetaConfig config = MetaConfig());

void UnStackInferMeta(const MetaTensor& x,
                      int axis,
                      int num,
                      std::vector<MetaTensor*> outs);

}  // namespace phi