/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#ifdef __GNUC__
#include <cxxabi.h>  // for __cxa_demangle
#endif               // __GNUC__

#ifdef PADDLE_WITH_CUDA
#include <cublas_v2.h>
#include <cudnn.h>
#include <curand.h>
#include <thrust/system/cuda/error.h>
#include <thrust/system_error.h>
#endif  // PADDLE_WITH_CUDA

#include <fstream>
#include <iomanip>
#include <iostream>
#include <memory>
#include <sstream>
#include <stdexcept>
#include <string>
#include <type_traits>
#include <utility>

#define GLOG_NO_ABBREVIATED_SEVERITIES  // glog's severity names conflict with windows.h on MSVC
#include "glog/logging.h"
#include "paddle/fluid/platform/macros.h"
#include "paddle/fluid/platform/port.h"
#include "paddle/fluid/string/printf.h"
#include "paddle/fluid/string/to_string.h"

#ifdef PADDLE_WITH_CUDA
#include "paddle/fluid/platform/dynload/cublas.h"
#include "paddle/fluid/platform/dynload/cudnn.h"
#include "paddle/fluid/platform/dynload/curand.h"
#if !defined(__APPLE__) && !defined(_WIN32)
#include "paddle/fluid/platform/dynload/nccl.h"
#endif  // !defined(__APPLE__) && !defined(_WIN32)
#endif  // PADDLE_WITH_CUDA

namespace paddle {
namespace platform {

#ifdef __GNUC__
inline std::string demangle(std::string name) {
  int status = -4;  // some arbitrary value to eliminate the compiler warning
  std::unique_ptr<char, void (*)(void*)> res{
      abi::__cxa_demangle(name.c_str(), NULL, NULL, &status), std::free};
  return (status == 0) ? res.get() : name;
}
#else
inline std::string demangle(std::string name) { return name; }
#endif

template <typename StrType>
inline std::string GetTraceBackString(StrType&& what, const char* file,
                                      int line) {
  static constexpr int TRACE_STACK_LIMIT = 100;
  std::ostringstream sout;

  sout << string::Sprintf("%s at [%s:%d]", std::forward<StrType>(what), file,
                          line)
       << std::endl;
  sout << "PaddlePaddle Call Stacks: " << std::endl;
#if !defined(_WIN32)
  void* call_stack[TRACE_STACK_LIMIT];
  auto size = backtrace(call_stack, TRACE_STACK_LIMIT);
  auto symbols = backtrace_symbols(call_stack, size);
  Dl_info info;
  for (int i = 0; i < size; ++i) {
    if (dladdr(call_stack[i], &info) && info.dli_sname) {
      auto demangled = demangle(info.dli_sname);
      auto addr_offset = static_cast<char*>(call_stack[i]) -
                         static_cast<char*>(info.dli_saddr);
      sout << string::Sprintf("%-3d %*0p %s + %zd\n", i, 2 + sizeof(void*) * 2,
                              call_stack[i], demangled, addr_offset);
    } else {
      sout << string::Sprintf("%-3d %*0p\n", i, 2 + sizeof(void*) * 2,
                              call_stack[i]);
    }
  }
  free(symbols);
#else
  sout << "Stack backtrace is not supported on Windows yet.";
#endif
  return sout.str();
}

struct EnforceNotMet : public std::exception {
  std::string err_str_;
  EnforceNotMet(std::exception_ptr e, const char* file, int line) {
    try {
      std::rethrow_exception(e);
    } catch (std::exception& e) {
      err_str_ = GetTraceBackString(e.what(), file, line);
    }
  }

  EnforceNotMet(const std::string& str, const char* file, int line)
      : err_str_(GetTraceBackString(str, file, line)) {}

  const char* what() const noexcept override { return err_str_.c_str(); }
};
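
// Usage sketch (the message and values are illustrative): EnforceNotMet is the
// exception type thrown by the enforce macros defined below, so callers catch
// it by reference, e.g.
//
//   try {
//     PADDLE_ENFORCE(false, "tensor %s is empty", "x");
//   } catch (const paddle::platform::EnforceNotMet& err) {
//     LOG(ERROR) << err.what();  // message plus "PaddlePaddle Call Stacks:"
//   }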

struct EOFException : public std::exception {
  std::string err_str_;
  EOFException(const char* err_msg, const char* file, int line) {
    err_str_ = string::Sprintf("%s at [%s:%d]", err_msg, file, line);
  }

  const char* what() const noexcept override { return err_str_.c_str(); }
};
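
// Usage sketch: EOFException is what PADDLE_THROW_EOF() (defined below)
// throws, so it can be caught separately from EnforceNotMet, e.g.
//
//   try {
//     PADDLE_THROW_EOF();
//   } catch (const paddle::platform::EOFException& err) {
//     VLOG(3) << err.what();  // "There is no next data. at [file:line]"
//   }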

// Because most enforce conditions would evaluate to true, we use
// __builtin_expect to tell the compiler that the condition is likely true, so
// that it can optimize the generated code for the common path. This produces
// faster binary code. __builtin_expect is a GCC built-in with no MSVC
// equivalent. For more details, please check
// https://stackoverflow.com/a/43870188/724872.
#if !defined(_WIN32)
#define UNLIKELY(condition) __builtin_expect(static_cast<bool>(condition), 0)
#else
// There is no equivalent intrinsic in MSVC.
#define UNLIKELY(condition) (condition)
#endif

#if !defined(_WIN32)
#define LIKELY(condition) __builtin_expect(static_cast<bool>(condition), 1)
#else
// There is no equivalent intrinsic in MSVC.
#define LIKELY(condition) (condition)
#endif
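
// Usage sketch (ptr is a hypothetical pointer): wrap a rarely-true condition
// so the compiler favors the common path, e.g.
//
//   if (UNLIKELY(ptr == nullptr)) { PADDLE_THROW("ptr must not be null"); }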

inline bool is_error(bool stat) { return !stat; }

inline void throw_on_error(bool stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(msg);
#else
  LOG(FATAL) << msg;
#endif
}

#ifdef PADDLE_WITH_CUDA

inline bool is_error(cudaError_t e) { return e != cudaSuccess; }

inline void throw_on_error(cudaError_t e, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw thrust::system_error(e, thrust::cuda_category(), msg);
#else
  LOG(FATAL) << msg;
#endif
}

inline bool is_error(curandStatus_t stat) {
  return stat != CURAND_STATUS_SUCCESS;
}

inline void throw_on_error(curandStatus_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  // curandStatus_t has no thrust error category, so a generic CUDA error code
  // is used for the exception.
  throw thrust::system_error(cudaErrorLaunchFailure, thrust::cuda_category(),
                             msg);
#else
  LOG(FATAL) << msg;
#endif
}

inline bool is_error(cudnnStatus_t stat) {
  return stat != CUDNN_STATUS_SUCCESS;
}

inline void throw_on_error(cudnnStatus_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(platform::dynload::cudnnGetErrorString(stat) + msg);
#else
  LOG(FATAL) << platform::dynload::cudnnGetErrorString(stat) << msg;
#endif
}

inline bool is_error(cublasStatus_t stat) {
  return stat != CUBLAS_STATUS_SUCCESS;
}

inline void throw_on_error(cublasStatus_t stat, const std::string& msg) {
  std::string err;
  if (stat == CUBLAS_STATUS_NOT_INITIALIZED) {
    err = "CUBLAS: not initialized, ";
  } else if (stat == CUBLAS_STATUS_ALLOC_FAILED) {
    err = "CUBLAS: alloc failed, ";
  } else if (stat == CUBLAS_STATUS_INVALID_VALUE) {
    err = "CUBLAS: invalid value, ";
  } else if (stat == CUBLAS_STATUS_ARCH_MISMATCH) {
    err = "CUBLAS: arch mismatch, ";
  } else if (stat == CUBLAS_STATUS_MAPPING_ERROR) {
    err = "CUBLAS: mapping error, ";
  } else if (stat == CUBLAS_STATUS_EXECUTION_FAILED) {
    err = "CUBLAS: execution failed, ";
  } else if (stat == CUBLAS_STATUS_INTERNAL_ERROR) {
    err = "CUBLAS: internal error, ";
  } else if (stat == CUBLAS_STATUS_NOT_SUPPORTED) {
    err = "CUBLAS: not supported, ";
  } else if (stat == CUBLAS_STATUS_LICENSE_ERROR) {
    err = "CUBLAS: license error, ";
  }
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(err + msg);
#else
  LOG(FATAL) << err << msg;
#endif
}

#if !defined(__APPLE__) && !defined(_WIN32)
inline bool is_error(ncclResult_t nccl_result) {
  return nccl_result != ncclSuccess;
}

inline void throw_on_error(ncclResult_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(platform::dynload::ncclGetErrorString(stat) + msg);
#else
  LOG(FATAL) << platform::dynload::ncclGetErrorString(stat) << msg;
#endif
}
#endif  // !defined(__APPLE__) && !defined(_WIN32)
#endif  // PADDLE_WITH_CUDA

#ifdef PADDLE_WITH_CUDA
namespace details {

template <typename T>
struct CudaStatusType {};

#define DEFINE_CUDA_STATUS_TYPE(type, success_value) \
  template <>                                        \
  struct CudaStatusType<type> {                      \
    using Type = type;                               \
    static constexpr Type kSuccess = success_value;  \
  }

DEFINE_CUDA_STATUS_TYPE(cudaError_t, cudaSuccess);
DEFINE_CUDA_STATUS_TYPE(curandStatus_t, CURAND_STATUS_SUCCESS);
DEFINE_CUDA_STATUS_TYPE(cudnnStatus_t, CUDNN_STATUS_SUCCESS);
DEFINE_CUDA_STATUS_TYPE(cublasStatus_t, CUBLAS_STATUS_SUCCESS);

#if !defined(__APPLE__) && !defined(_WIN32)
DEFINE_CUDA_STATUS_TYPE(ncclResult_t, ncclSuccess);
#endif

}  // namespace details
#endif
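
// For reference, DEFINE_CUDA_STATUS_TYPE(cudaError_t, cudaSuccess) above
// expands to a specialization equivalent to:
//
//   template <>
//   struct CudaStatusType<cudaError_t> {
//     using Type = cudaError_t;
//     static constexpr Type kSuccess = cudaSuccess;
//   };
//
// PADDLE_ENFORCE_CUDA_SUCCESS (defined below) uses it to look up the success
// value for whatever status type a CUDA API call returns.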

#define PADDLE_THROW(...)                                            \
  do {                                                               \
    throw ::paddle::platform::EnforceNotMet(                         \
        ::paddle::string::Sprintf(__VA_ARGS__), __FILE__, __LINE__); \
  } while (0)
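
// Usage sketch (dtype is a hypothetical variable): unconditionally raise an
// EnforceNotMet with a printf-style message, e.g.
//
//   PADDLE_THROW("Unsupported data type: %d", static_cast<int>(dtype));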

#if defined(__CUDA_ARCH__)
// For cuda, the assertions can affect performance and it is therefore
// recommended to disable them in production code
// https://docs.nvidia.com/cuda/cuda-c-programming-guide/index.html#assertion
#define PADDLE_ENFORCE(_IS_NOT_ERROR, __FORMAT, ...)                   \
  do {                                                                 \
    if (!(_IS_NOT_ERROR)) {                                            \
      printf("Exception: %s:%d Assertion `%s` failed. " __FORMAT "\n", \
             __FILE__, __LINE__, #_IS_NOT_ERROR, ##__VA_ARGS__);       \
      asm("trap;");                                                    \
    }                                                                  \
  } while (0)
#else
#define PADDLE_ENFORCE(COND, ...)                                         \
  do {                                                                    \
    auto __cond__ = (COND);                                               \
    if (UNLIKELY(::paddle::platform::is_error(__cond__))) {               \
      try {                                                               \
        ::paddle::platform::throw_on_error(                               \
            __cond__, ::paddle::string::Sprintf(__VA_ARGS__));            \
      } catch (...) {                                                     \
        throw ::paddle::platform::EnforceNotMet(std::current_exception(), \
                                                __FILE__, __LINE__);      \
      }                                                                   \
    }                                                                     \
  } while (0)
#endif
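
// Usage sketch (in_size is a hypothetical variable): on the host, the first
// argument may be a bool or any status type that has is_error/throw_on_error
// overloads above, e.g.
//
//   PADDLE_ENFORCE(in_size > 0, "Input size must be positive, but got %d",
//                  in_size);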

#ifdef PADDLE_WITH_CUDA
#define PADDLE_ENFORCE_CUDA_SUCCESS(COND, ...)                            \
  do {                                                                    \
    auto __cond__ = (COND);                                               \
    using __CUDA_STATUS_TYPE__ = decltype(__cond__);                      \
    constexpr auto __success_type__ =                                     \
        ::paddle::platform::details::CudaStatusType<                      \
            __CUDA_STATUS_TYPE__>::kSuccess;                              \
    if (UNLIKELY(__cond__ != __success_type__)) {                         \
      try {                                                               \
        ::paddle::platform::throw_on_error(                               \
            __cond__, ::paddle::string::Sprintf(__VA_ARGS__));            \
      } catch (...) {                                                     \
        throw ::paddle::platform::EnforceNotMet(std::current_exception(), \
                                                __FILE__, __LINE__);      \
      }                                                                   \
    }                                                                     \
  } while (0)

#undef DEFINE_CUDA_STATUS_TYPE
#endif
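
// Usage sketch (stream is a hypothetical cudaStream_t): check a raw CUDA
// runtime call and throw with a formatted message on failure, e.g.
//
//   PADDLE_ENFORCE_CUDA_SUCCESS(cudaStreamSynchronize(stream),
//                               "cudaStreamSynchronize failed");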

#define PADDLE_THROW_EOF()                                                     \
  do {                                                                         \
    throw ::paddle::platform::EOFException("There is no next data.", __FILE__, \
                                           __LINE__);                          \
  } while (0)

#define PADDLE_THROW_BAD_ALLOC(...)                                  \
  do {                                                               \
    throw ::paddle::memory::allocation::BadAlloc(                    \
        ::paddle::string::Sprintf(__VA_ARGS__), __FILE__, __LINE__); \
  } while (0)
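
// Usage sketch (size and dev_id are hypothetical variables; BadAlloc itself is
// declared elsewhere, not in this header), e.g.
//
//   PADDLE_THROW_BAD_ALLOC("Cannot allocate %d bytes on GPU %d", size, dev_id);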

/*
 * Some enforce helpers here, usage:
 *    int a = 1;
 *    int b = 2;
 *    PADDLE_ENFORCE_EQ(a, b);
 *
 *    will raise an exception with a message like:
 *    "Enforce failed. Expected a == b, but received a:1 != b:2."
 *      with detailed stack information.
 *
 *    Extra messages are also supported, for example:
 *    PADDLE_ENFORCE_EQ(a, b, "some simple enforce failed between %d numbers", 2)
 */
#define PADDLE_ENFORCE_NOT_NULL(__VAL, ...)                 \
  do {                                                      \
    if (UNLIKELY(nullptr == (__VAL))) {                     \
      PADDLE_THROW(#__VAL " should not be null\n%s",        \
                   ::paddle::string::Sprintf(__VA_ARGS__)); \
    }                                                       \
  } while (0)
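
// Usage sketch (out_ptr is a hypothetical pointer): guard a pointer before
// dereferencing it, with an optional extra message, e.g.
//
//   PADDLE_ENFORCE_NOT_NULL(out_ptr, "Output %s of operator %s is null",
//                           "Out", "fc");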

namespace details {
template <typename T>
inline constexpr bool IsArithmetic() {
  return std::is_arithmetic<T>::value;
}

template <typename T1, typename T2, bool kIsArithmetic /* = true */>
struct TypeConverterImpl {
  using Type1 = typename std::common_type<T1, T2>::type;
  using Type2 = Type1;
};

template <typename T1, typename T2>
struct TypeConverterImpl<T1, T2, false> {
  using Type1 = T1;
  using Type2 = T2;
};

template <typename T1, typename T2>
struct TypeConverter {
 private:
  static constexpr bool kIsArithmetic =
      IsArithmetic<T1>() && IsArithmetic<T2>();

 public:
  using Type1 = typename TypeConverterImpl<T1, T2, kIsArithmetic>::Type1;
  using Type2 = typename TypeConverterImpl<T1, T2, kIsArithmetic>::Type2;
};

template <typename T1, typename T2>
using CommonType1 = typename std::add_lvalue_reference<
    typename std::add_const<typename TypeConverter<T1, T2>::Type1>::type>::type;

template <typename T1, typename T2>
using CommonType2 = typename std::add_lvalue_reference<
    typename std::add_const<typename TypeConverter<T1, T2>::Type2>::type>::type;

// Here, we use SFINAE to check whether T can be converted to std::string
template <typename T>
struct CanToString {
 private:
  using YesType = uint8_t;
  using NoType = uint16_t;

  template <typename U>
  static YesType Check(decltype(std::cout << std::declval<U>())) {
    return 0;
  }

  template <typename U>
  static NoType Check(...) {
    return 0;
  }

 public:
  static constexpr bool kValue =
      std::is_same<YesType, decltype(Check<T>(std::cout))>::value;
};

template <bool kCanToString /* = true */>
struct BinaryCompareMessageConverter {
  template <typename T>
  static std::string Convert(const char* expression, const T& value) {
    return expression + std::string(":") + string::to_string(value);
  }
};

template <>
struct BinaryCompareMessageConverter<false> {
  template <typename T>
  static const char* Convert(const char* expression, const T& value) {
    return expression;
  }
};

}  // namespace details
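
// For reference: CanToString<T>::kValue above is true exactly when
// `std::cout << value` compiles for T, in which case the binary-compare
// message below prints "expr:value"; otherwise only the expression text is
// shown. CommonType1/CommonType2 promote two arithmetic operands to a common
// type so comparisons such as size_t vs. int literals behave as expected.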

#define __PADDLE_BINARY_COMPARE(__VAL1, __VAL2, __CMP, __INV_CMP, ...)         \
  do {                                                                         \
    auto __val1 = (__VAL1);                                                    \
    auto __val2 = (__VAL2);                                                    \
    using __TYPE1__ = decltype(__val1);                                        \
    using __TYPE2__ = decltype(__val2);                                        \
    using __COMMON_TYPE1__ =                                                   \
        ::paddle::platform::details::CommonType1<__TYPE1__, __TYPE2__>;        \
    using __COMMON_TYPE2__ =                                                   \
        ::paddle::platform::details::CommonType2<__TYPE1__, __TYPE2__>;        \
    bool __is_not_error = (static_cast<__COMMON_TYPE1__>(__val1))__CMP(        \
        static_cast<__COMMON_TYPE2__>(__val2));                                \
    if (UNLIKELY(!__is_not_error)) {                                           \
      constexpr bool __kCanToString__ =                                        \
          ::paddle::platform::details::CanToString<__TYPE1__>::kValue &&       \
          ::paddle::platform::details::CanToString<__TYPE2__>::kValue;         \
      PADDLE_THROW("Enforce failed. Expected %s " #__CMP                       \
                   " %s, but received %s " #__INV_CMP " %s.\n%s",              \
                   #__VAL1, #__VAL2,                                           \
                   ::paddle::platform::details::BinaryCompareMessageConverter< \
                       __kCanToString__>::Convert(#__VAL1, __val1),            \
                   ::paddle::platform::details::BinaryCompareMessageConverter< \
                       __kCanToString__>::Convert(#__VAL2, __val2),            \
                   ::paddle::string::Sprintf(__VA_ARGS__));                    \
    }                                                                          \
  } while (0)

#define PADDLE_ENFORCE_EQ(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, ==, !=, __VA_ARGS__)
#define PADDLE_ENFORCE_NE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, !=, ==, __VA_ARGS__)
#define PADDLE_ENFORCE_GT(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, >, <=, __VA_ARGS__)
#define PADDLE_ENFORCE_GE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, >=, <, __VA_ARGS__)
#define PADDLE_ENFORCE_LT(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, <, >=, __VA_ARGS__)
#define PADDLE_ENFORCE_LE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, <=, >, __VA_ARGS__)
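
// Usage sketch (x_dims is a hypothetical DDim): every comparison takes two
// values plus an optional printf-style message, e.g.
//
//   PADDLE_ENFORCE_GE(x_dims.size(), 2,
//                     "Input rank must be at least 2, but got %d",
//                     x_dims.size());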

#define __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL1, __VAL2, __CMP, \
                                           __INV_CMP, ...)               \
  do {                                                                   \
    auto __val1 = (__VAL1);                                              \
    auto __val2 = (__VAL2);                                              \
    if (!__CTX->IsRuntime()) {                                           \
      if (__val1 == -1 || __val2 == -1) {                                \
        break;                                                           \
      }                                                                  \
    }                                                                    \
    using __TYPE1__ = decltype(__val1);                                  \
    using __TYPE2__ = decltype(__val2);                                  \
    using __COMMON_TYPE1__ =                                             \
        ::paddle::platform::details::CommonType1<__TYPE1__, __TYPE2__>;  \
    using __COMMON_TYPE2__ =                                             \
        ::paddle::platform::details::CommonType2<__TYPE1__, __TYPE2__>;  \
    bool __is_not_error = (static_cast<__COMMON_TYPE1__>(__val1))__CMP(  \
        static_cast<__COMMON_TYPE2__>(__val2));                          \
    if (UNLIKELY(!__is_not_error)) {                                     \
      PADDLE_THROW("Enforce failed. Expected %s " #__CMP                 \
                   " %s, but received %s:%s " #__INV_CMP " %s:%s.\n%s",  \
                   #__VAL1, #__VAL2, #__VAL1,                            \
                   ::paddle::string::to_string(__val1), #__VAL2,         \
                   ::paddle::string::to_string(__val2),                  \
                   ::paddle::string::Sprintf(__VA_ARGS__));              \
    }                                                                    \
  } while (0)

#define PADDLE_INFERSHAPE_ENFORCE_EQ(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, ==, !=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_NE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, !=, ==, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_GT(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, >, <=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_GE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, >=, <, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_LT(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, <, >=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_LE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, <=, >, __VA_ARGS__)
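
// Usage sketch (ctx, x_dims, y_dims are hypothetical): unlike the plain
// comparisons above, these skip the check at infer-shape (compile) time when
// either value is -1, i.e. the dimension is not yet known, e.g.
//
//   PADDLE_INFERSHAPE_ENFORCE_EQ(ctx, x_dims[1], y_dims[0],
//                                "The inner dimensions of matmul must match.");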

}  // namespace platform
}  // namespace paddle