/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#ifdef __GNUC__
#include <cxxabi.h>  // for __cxa_demangle
#endif               // __GNUC__

#ifdef PADDLE_WITH_CUDA
#include <cublas_v2.h>
#include <cudnn.h>
#include <curand.h>
#include <thrust/system/cuda/error.h>
#include <thrust/system_error.h>
#endif  // PADDLE_WITH_CUDA

#include <fstream>
#include <iomanip>
#include <iostream>
#include <memory>
#include <sstream>
#include <stdexcept>
#include <string>
#include <type_traits>
#include <utility>

#define GLOG_NO_ABBREVIATED_SEVERITIES  // MSVC: glog conflicts with windows.h
#include "glog/logging.h"
#include "paddle/fluid/platform/macros.h"
#include "paddle/fluid/platform/port.h"
#include "paddle/fluid/string/printf.h"
#include "paddle/fluid/string/to_string.h"

#ifdef PADDLE_WITH_CUDA
#include "paddle/fluid/platform/dynload/cublas.h"
#include "paddle/fluid/platform/dynload/cudnn.h"
#include "paddle/fluid/platform/dynload/curand.h"
#if !defined(__APPLE__) && !defined(_WIN32)
#include "paddle/fluid/platform/dynload/nccl.h"
#endif  // !defined(__APPLE__) && !defined(_WIN32)
#endif  // PADDLE_WITH_CUDA

#define WITH_SIMPLE_TRACEBACK

namespace paddle {
namespace platform {

#ifdef __GNUC__
inline std::string demangle(std::string name) {
  int status = -4;  // some arbitrary value to eliminate the compiler warning
  std::unique_ptr<char, void (*)(void*)> res{
      abi::__cxa_demangle(name.c_str(), NULL, NULL, &status), std::free};
  return (status == 0) ? res.get() : name;
}
#else
inline std::string demangle(std::string name) { return name; }
#endif
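
// For reference, with GCC/Clang demangle("_Z3foov") typically yields "foo()";
// a name that cannot be demangled is returned unchanged.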

template <typename StrType>
inline std::string GetTraceBackString(StrType&& what, const char* file,
                                      int line) {
  static constexpr int TRACE_STACK_LIMIT = 100;
  std::ostringstream sout;

#if !defined(_WIN32)
  void* call_stack[TRACE_STACK_LIMIT];
  auto size = backtrace(call_stack, TRACE_STACK_LIMIT);
  auto symbols = backtrace_symbols(call_stack, size);
  Dl_info info;
  int idx = 0;
  for (int i = 0; i < size; ++i) {
    if (dladdr(call_stack[i], &info) && info.dli_sname) {
      auto demangled = demangle(info.dli_sname);
#ifdef WITH_SIMPLE_TRACEBACK
      std::string path(info.dli_fname);
      // C++ traceback info comes from core.so
      if (path.substr(path.length() - 3).compare(".so") == 0) {
        sout << string::Sprintf("%-3d %s\n", idx++, demangled);
      }
#else
      auto addr_offset = static_cast<char*>(call_stack[i]) -
                         static_cast<char*>(info.dli_saddr);
      sout << string::Sprintf("%-3d %*0p %s + %zd\n", i, 2 + sizeof(void*) * 2,
                              call_stack[i], demangled, addr_offset);
    } else {
      sout << string::Sprintf("%-3d %*0p\n", i, 2 + sizeof(void*) * 2,
                              call_stack[i]);
#endif
    }
  }
  free(symbols);
#else
  sout << "Windows does not support stack backtrace yet.";
#endif
  sout << string::Sprintf("PaddleCheckError: %s at [%s:%d]",
                          std::forward<StrType>(what), file, line)
       << std::endl;
  return sout.str();
}

struct EnforceNotMet : public std::exception {
  std::string err_str_;
  EnforceNotMet(std::exception_ptr e, const char* file, int line) {
    try {
      std::rethrow_exception(e);
    } catch (std::exception& e) {
      err_str_ = GetTraceBackString(e.what(), file, line);
    }
  }

  EnforceNotMet(const std::string& str, const char* file, int line)
      : err_str_(GetTraceBackString(str, file, line)) {}

  const char* what() const noexcept override { return err_str_.c_str(); }
};

struct EOFException : public std::exception {
  std::string err_str_;
  EOFException(const char* err_msg, const char* file, int line) {
    err_str_ = string::Sprintf("%s at [%s:%d]", err_msg, file, line);
  }

  const char* what() const noexcept override { return err_str_.c_str(); }
};

// Because most enforce conditions would evaluate to true, we can use
// __builtin_expect to instruct the C++ compiler to generate code that
// always forces branch prediction of true.
// This generates faster binary code. __builtin_expect is a GCC/Clang built-in.
// For more details, please check https://stackoverflow.com/a/43870188/724872.
#if !defined(_WIN32)
#define UNLIKELY(condition) __builtin_expect(static_cast<bool>(condition), 0)
#else
// There is no equivalent intrinsic in MSVC.
#define UNLIKELY(condition) (condition)
#endif

#if !defined(_WIN32)
#define LIKELY(condition) __builtin_expect(static_cast<bool>(condition), 1)
#else
// There is no equivalent intrinsic in MSVC.
#define LIKELY(condition) (condition)
#endif

inline bool is_error(bool stat) { return !stat; }

inline void throw_on_error(bool stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(msg);
#else
  LOG(FATAL) << msg;
#endif
}

#ifdef PADDLE_WITH_CUDA

inline bool is_error(cudaError_t e) { return e != cudaSuccess; }

inline void throw_on_error(cudaError_t e, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw thrust::system_error(e, thrust::cuda_category(), msg);
#else
  LOG(FATAL) << msg;
#endif
}

inline bool is_error(curandStatus_t stat) {
  return stat != CURAND_STATUS_SUCCESS;
}

inline void throw_on_error(curandStatus_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw thrust::system_error(cudaErrorLaunchFailure, thrust::cuda_category(),
                             msg);
#else
  LOG(FATAL) << msg;
#endif
}

inline bool is_error(cudnnStatus_t stat) {
  return stat != CUDNN_STATUS_SUCCESS;
}

inline void throw_on_error(cudnnStatus_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(platform::dynload::cudnnGetErrorString(stat) + msg);
#else
  LOG(FATAL) << platform::dynload::cudnnGetErrorString(stat) << msg;
#endif
}

inline bool is_error(cublasStatus_t stat) {
  return stat != CUBLAS_STATUS_SUCCESS;
}

inline void throw_on_error(cublasStatus_t stat, const std::string& msg) {
  std::string err;
  if (stat == CUBLAS_STATUS_NOT_INITIALIZED) {
    err = "CUBLAS: not initialized, ";
  } else if (stat == CUBLAS_STATUS_ALLOC_FAILED) {
    err = "CUBLAS: alloc failed, ";
  } else if (stat == CUBLAS_STATUS_INVALID_VALUE) {
    err = "CUBLAS: invalid value, ";
  } else if (stat == CUBLAS_STATUS_ARCH_MISMATCH) {
    err = "CUBLAS: arch mismatch, ";
  } else if (stat == CUBLAS_STATUS_MAPPING_ERROR) {
    err = "CUBLAS: mapping error, ";
  } else if (stat == CUBLAS_STATUS_EXECUTION_FAILED) {
    err = "CUBLAS: execution failed, ";
  } else if (stat == CUBLAS_STATUS_INTERNAL_ERROR) {
    err = "CUBLAS: internal error, ";
  } else if (stat == CUBLAS_STATUS_NOT_SUPPORTED) {
    err = "CUBLAS: not supported, ";
  } else if (stat == CUBLAS_STATUS_LICENSE_ERROR) {
    err = "CUBLAS: license error, ";
  }
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(err + msg);
#else
  LOG(FATAL) << err << msg;
#endif
}

#if !defined(__APPLE__) && !defined(_WIN32)
inline bool is_error(ncclResult_t nccl_result) {
  return nccl_result != ncclSuccess;
}

inline void throw_on_error(ncclResult_t stat, const std::string& msg) {
#ifndef REPLACE_ENFORCE_GLOG
  throw std::runtime_error(platform::dynload::ncclGetErrorString(stat) + msg);
#else
  LOG(FATAL) << platform::dynload::ncclGetErrorString(stat) << msg;
#endif
}
#endif  // !defined(__APPLE__) && !defined(_WIN32)
#endif  // PADDLE_WITH_CUDA

#ifdef PADDLE_WITH_CUDA
namespace details {

template <typename T>
struct CudaStatusType {};

#define DEFINE_CUDA_STATUS_TYPE(type, success_value) \
  template <>                                        \
  struct CudaStatusType<type> {                      \
    using Type = type;                               \
    static constexpr Type kSuccess = success_value;  \
  }

DEFINE_CUDA_STATUS_TYPE(cudaError_t, cudaSuccess);
DEFINE_CUDA_STATUS_TYPE(curandStatus_t, CURAND_STATUS_SUCCESS);
DEFINE_CUDA_STATUS_TYPE(cudnnStatus_t, CUDNN_STATUS_SUCCESS);
DEFINE_CUDA_STATUS_TYPE(cublasStatus_t, CUBLAS_STATUS_SUCCESS);

#if !defined(__APPLE__) && !defined(_WIN32)
DEFINE_CUDA_STATUS_TYPE(ncclResult_t, ncclSuccess);
#endif
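
// These specializations are consumed by PADDLE_ENFORCE_CUDA_SUCCESS below to
// map each CUDA-related status type to its success value.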

}  // namespace details
#endif

#define PADDLE_THROW(...)                                            \
  do {                                                               \
    throw ::paddle::platform::EnforceNotMet(                         \
        ::paddle::string::Sprintf(__VA_ARGS__), __FILE__, __LINE__); \
  } while (0)
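
// Illustrative usage (a sketch; `dtype` is a placeholder, not defined here):
//
//   PADDLE_THROW("Unsupported data type %s", dtype);
//
// The formatted message becomes an EnforceNotMet carrying the file, line, and
// a captured stack trace.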

#if defined(__CUDA_ARCH__)
// For cuda, the assertions can affect performance and it is therefore
// recommended to disable them in production code
// https://docs.nvidia.com/cuda/cuda-c-programming-guide/index.html#assertion
#define PADDLE_ENFORCE(_IS_NOT_ERROR, __FORMAT, ...)                   \
  do {                                                                 \
    if (!(_IS_NOT_ERROR)) {                                            \
      printf("Exception: %s:%d Assertion `%s` failed. " __FORMAT "\n", \
             __FILE__, __LINE__, #_IS_NOT_ERROR, ##__VA_ARGS__);       \
      asm("trap;");                                                    \
    }                                                                  \
  } while (0)
#else
#define PADDLE_ENFORCE(COND, ...)                                         \
  do {                                                                    \
    auto __cond__ = (COND);                                               \
    if (UNLIKELY(::paddle::platform::is_error(__cond__))) {               \
      try {                                                               \
        ::paddle::platform::throw_on_error(                               \
            __cond__, ::paddle::string::Sprintf(__VA_ARGS__));            \
      } catch (...) {                                                     \
        throw ::paddle::platform::EnforceNotMet(std::current_exception(), \
                                                __FILE__, __LINE__);      \
      }                                                                   \
    }                                                                     \
  } while (0)
#endif
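
// Illustrative usage (a sketch; `lod_level` is a placeholder variable):
//
//   PADDLE_ENFORCE(lod_level > 0, "lod_level should be > 0, but got %d",
//                  lod_level);
//
// On the host a failed condition throws EnforceNotMet; inside a CUDA kernel
// it prints the message and executes a trap instruction instead.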

#ifdef PADDLE_WITH_CUDA
#define PADDLE_ENFORCE_CUDA_SUCCESS(COND, ...)                            \
  do {                                                                    \
    auto __cond__ = (COND);                                               \
    using __CUDA_STATUS_TYPE__ = decltype(__cond__);                      \
    constexpr auto __success_type__ =                                     \
        ::paddle::platform::details::CudaStatusType<                      \
            __CUDA_STATUS_TYPE__>::kSuccess;                              \
    if (UNLIKELY(__cond__ != __success_type__)) {                         \
      try {                                                               \
        ::paddle::platform::throw_on_error(                               \
            __cond__, ::paddle::string::Sprintf(__VA_ARGS__));            \
      } catch (...) {                                                     \
        throw ::paddle::platform::EnforceNotMet(std::current_exception(), \
                                                __FILE__, __LINE__);      \
      }                                                                   \
    }                                                                     \
  } while (0)

#undef DEFINE_CUDA_STATUS_TYPE
#endif
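
// Illustrative usage (a sketch; `device_id` is a placeholder):
//
//   PADDLE_ENFORCE_CUDA_SUCCESS(cudaSetDevice(device_id),
//                               "Failed to set CUDA device %d", device_id);
//
// The status type is deduced from the call, compared with the kSuccess value
// registered above, and a failure is routed through the matching
// throw_on_error overload and rethrown as EnforceNotMet.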

#define PADDLE_THROW_EOF()                                                     \
  do {                                                                         \
    throw ::paddle::platform::EOFException("There is no next data.", __FILE__, \
                                           __LINE__);                          \
  } while (0)
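
// Illustrative usage (a sketch; `reader` and HasNext() are placeholders):
//
//   if (!reader->HasNext()) PADDLE_THROW_EOF();
//
// Callers can catch platform::EOFException to detect the end of the data
// stream.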

#define PADDLE_THROW_BAD_ALLOC(...)                                  \
  do {                                                               \
    throw ::paddle::memory::allocation::BadAlloc(                    \
        ::paddle::string::Sprintf(__VA_ARGS__), __FILE__, __LINE__); \
  } while (0)

/*
 * Some enforce helpers here, usage:
 *    int a = 1;
 *    int b = 2;
 *    PADDLE_ENFORCE_EQ(a, b);
 *
 *    will raise an exception with a message like:
 *    "Expected input a == b, but received a(1) != b(2)."
 *    with detailed stack information.
 *
 *    Extra messages are also supported, for example:
 *    PADDLE_ENFORCE_EQ(a, b, "some simple enforce failed between %d numbers", 2)
 */
#define PADDLE_ENFORCE_NOT_NULL(__VAL, ...)                 \
  do {                                                      \
    if (UNLIKELY(nullptr == (__VAL))) {                     \
      PADDLE_THROW(#__VAL " should not be null\n%s",        \
                   ::paddle::string::Sprintf(__VA_ARGS__)); \
    }                                                       \
  } while (0)
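
// Illustrative usage (a sketch; `ptr` and `op_type` are placeholders):
//
//   PADDLE_ENFORCE_NOT_NULL(ptr, "Input of operator %s must not be null",
//                           op_type);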

namespace details {
template <typename T>
inline constexpr bool IsArithmetic() {
  return std::is_arithmetic<T>::value;
}

template <typename T1, typename T2, bool kIsArithmetic /* = true */>
struct TypeConverterImpl {
  using Type1 = typename std::common_type<T1, T2>::type;
  using Type2 = Type1;
};

template <typename T1, typename T2>
struct TypeConverterImpl<T1, T2, false> {
  using Type1 = T1;
  using Type2 = T2;
};

template <typename T1, typename T2>
struct TypeConverter {
 private:
  static constexpr bool kIsArithmetic =
      IsArithmetic<T1>() && IsArithmetic<T2>();

 public:
  using Type1 = typename TypeConverterImpl<T1, T2, kIsArithmetic>::Type1;
  using Type2 = typename TypeConverterImpl<T1, T2, kIsArithmetic>::Type2;
};

template <typename T1, typename T2>
using CommonType1 = typename std::add_lvalue_reference<
    typename std::add_const<typename TypeConverter<T1, T2>::Type1>::type>::type;

template <typename T1, typename T2>
using CommonType2 = typename std::add_lvalue_reference<
    typename std::add_const<typename TypeConverter<T1, T2>::Type2>::type>::type;

// Here, we use SFINAE to check whether T can be converted to std::string,
// i.e. whether it can be streamed into a std::ostream.
template <typename T>
struct CanToString {
 private:
  using YesType = uint8_t;
  using NoType = uint16_t;

  template <typename U>
  static YesType Check(decltype(std::cout << std::declval<U>())) {
    return 0;
  }

  template <typename U>
  static NoType Check(...) {
    return 0;
  }

 public:
  static constexpr bool kValue =
      std::is_same<YesType, decltype(Check<T>(std::cout))>::value;
};
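
// For example (illustrative): CanToString<int>::kValue is true because int can
// be streamed into std::cout, while CanToString<std::vector<int>>::kValue is
// false; such values are reported below by their expression text only.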

template <bool kCanToString /* = true */>
struct BinaryCompareMessageConverter {
  template <typename T>
  static std::string Convert(const char* expression, const T& value) {
    return expression + std::string(":") + string::to_string(value);
  }
};

template <>
struct BinaryCompareMessageConverter<false> {
  template <typename T>
  static const char* Convert(const char* expression, const T& value) {
    return expression;
  }
};

}  // namespace details

#define __PADDLE_BINARY_COMPARE(__VAL1, __VAL2, __CMP, __INV_CMP, ...)         \
  do {                                                                         \
    auto __val1 = (__VAL1);                                                    \
    auto __val2 = (__VAL2);                                                    \
    using __TYPE1__ = decltype(__val1);                                        \
    using __TYPE2__ = decltype(__val2);                                        \
    using __COMMON_TYPE1__ =                                                   \
        ::paddle::platform::details::CommonType1<__TYPE1__, __TYPE2__>;        \
    using __COMMON_TYPE2__ =                                                   \
        ::paddle::platform::details::CommonType2<__TYPE1__, __TYPE2__>;        \
    bool __is_not_error = (static_cast<__COMMON_TYPE1__>(__val1))__CMP(        \
        static_cast<__COMMON_TYPE2__>(__val2));                                \
    if (UNLIKELY(!__is_not_error)) {                                           \
      constexpr bool __kCanToString__ =                                        \
          ::paddle::platform::details::CanToString<__TYPE1__>::kValue &&       \
          ::paddle::platform::details::CanToString<__TYPE2__>::kValue;         \
      PADDLE_THROW("Expected %s " #__CMP " %s, but received %s " #__INV_CMP    \
                   " %s.\n%s",                                                 \
                   #__VAL1, #__VAL2,                                           \
                   ::paddle::platform::details::BinaryCompareMessageConverter< \
                       __kCanToString__>::Convert(#__VAL1, __val1),            \
                   ::paddle::platform::details::BinaryCompareMessageConverter< \
                       __kCanToString__>::Convert(#__VAL2, __val2),            \
                   ::paddle::string::Sprintf(__VA_ARGS__));                    \
    }                                                                          \
  } while (0)

#define PADDLE_ENFORCE_EQ(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, ==, !=, __VA_ARGS__)
#define PADDLE_ENFORCE_NE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, !=, ==, __VA_ARGS__)
#define PADDLE_ENFORCE_GT(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, >, <=, __VA_ARGS__)
#define PADDLE_ENFORCE_GE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, >=, <, __VA_ARGS__)
#define PADDLE_ENFORCE_LT(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, <, >=, __VA_ARGS__)
#define PADDLE_ENFORCE_LE(__VAL0, __VAL1, ...) \
  __PADDLE_BINARY_COMPARE(__VAL0, __VAL1, <=, >, __VA_ARGS__)
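
// Illustrative usage (a sketch; `x_dims` is a placeholder):
//
//   PADDLE_ENFORCE_GE(x_dims.size(), 2,
//                     "Input X should have at least 2 dimensions, but got %d",
//                     x_dims.size());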

#define __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL1, __VAL2, __CMP,       \
                                           __INV_CMP, ...)                     \
  do {                                                                         \
    auto __val1 = (__VAL1);                                                    \
    auto __val2 = (__VAL2);                                                    \
    if (!__CTX->IsRuntime()) {                                                 \
      if (__val1 == -1 || __val2 == -1) {                                      \
        break;                                                                 \
      }                                                                        \
    }                                                                          \
    using __TYPE1__ = decltype(__val1);                                        \
    using __TYPE2__ = decltype(__val2);                                        \
    using __COMMON_TYPE1__ =                                                   \
        ::paddle::platform::details::CommonType1<__TYPE1__, __TYPE2__>;        \
    using __COMMON_TYPE2__ =                                                   \
        ::paddle::platform::details::CommonType2<__TYPE1__, __TYPE2__>;        \
    bool __is_not_error = (static_cast<__COMMON_TYPE1__>(__val1))__CMP(        \
        static_cast<__COMMON_TYPE2__>(__val2));                                \
    if (UNLIKELY(!__is_not_error)) {                                           \
      PADDLE_THROW("Expected %s " #__CMP " %s, but received %s:%s " #__INV_CMP \
                   " %s:%s.\n%s",                                              \
                   #__VAL1, #__VAL2, #__VAL1,                                  \
                   ::paddle::string::to_string(__val1), #__VAL2,               \
                   ::paddle::string::to_string(__val2),                        \
                   ::paddle::string::Sprintf(__VA_ARGS__));                    \
    }                                                                          \
  } while (0)

#define PADDLE_INFERSHAPE_ENFORCE_EQ(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, ==, !=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_NE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, !=, ==, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_GT(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, >, <=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_GE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, >=, <, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_LT(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, <, >=, __VA_ARGS__)
#define PADDLE_INFERSHAPE_ENFORCE_LE(__CTX, __VAL0, __VAL1, ...) \
  __PADDLE_INFERSHAPE_BINARY_COMPARE(__CTX, __VAL0, __VAL1, <=, >, __VA_ARGS__)
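
// Illustrative usage inside an operator's InferShape (a sketch; `ctx`,
// `x_dims`, and `y_dims` are placeholders):
//
//   PADDLE_INFERSHAPE_ENFORCE_EQ(ctx, x_dims[0], y_dims[0],
//                                "The first dimension of X and Y must match");
//
// When ctx->IsRuntime() is false, a dimension of -1 (i.e. unknown at compile
// time) skips the check.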

}  // namespace platform
}  // namespace paddle